Column schema (name: type, observed range):

hexsha: stringlengths, 40 to 40
size: int64, 7 to 1.04M
ext: stringclasses, 10 values
lang: stringclasses, 1 value
max_stars_repo_path: stringlengths, 4 to 247
max_stars_repo_name: stringlengths, 4 to 125
max_stars_repo_head_hexsha: stringlengths, 40 to 78
max_stars_repo_licenses: sequencelengths, 1 to 10
max_stars_count: int64, 1 to 368k
max_stars_repo_stars_event_min_datetime: stringlengths, 24 to 24
max_stars_repo_stars_event_max_datetime: stringlengths, 24 to 24
max_issues_repo_path: stringlengths, 4 to 247
max_issues_repo_name: stringlengths, 4 to 125
max_issues_repo_head_hexsha: stringlengths, 40 to 78
max_issues_repo_licenses: sequencelengths, 1 to 10
max_issues_count: int64, 1 to 116k
max_issues_repo_issues_event_min_datetime: stringlengths, 24 to 24
max_issues_repo_issues_event_max_datetime: stringlengths, 24 to 24
max_forks_repo_path: stringlengths, 4 to 247
max_forks_repo_name: stringlengths, 4 to 125
max_forks_repo_head_hexsha: stringlengths, 40 to 78
max_forks_repo_licenses: sequencelengths, 1 to 10
max_forks_count: int64, 1 to 105k
max_forks_repo_forks_event_min_datetime: stringlengths, 24 to 24
max_forks_repo_forks_event_max_datetime: stringlengths, 24 to 24
content: stringlengths, 5 to 1.04M
avg_line_length: float64, 1.77 to 618k
max_line_length: int64, 1 to 970k
alphanum_fraction: float64, 0 to 1
original_content: stringlengths, 7 to 1.04M
filtered:remove_non_ascii: int64, 0 to 514k
filtered:remove_delete_markers: int64, 0 to 0
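For orientation, each record below is one source file keyed by these columns. A minimal sketch of slicing such a table with pandas, assuming the rows have been materialized as a local Parquet file (the filename "train.parquet" is a hypothetical stand-in):

import pandas as pd

# Load the per-file records; "train.parquet" is a hypothetical local export.
df = pd.read_parquet("train.parquet")

# Keep well-starred Python files, using the column names from the schema above.
popular = df[(df["max_stars_count"].fillna(0) >= 100) & (df["ext"] == "py")]
print(popular[["max_stars_repo_name", "max_stars_repo_path", "size"]].head())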
f0dbbe1279aab50b7683686f8eb0d599271d1d74
4,229
py
Python
tfsnippet/layers/convolutional/shifted.py
QianLiGui/tfsnippet
63adaf04d2ffff8dec299623627d55d4bacac598
[ "MIT" ]
63
2018-06-06T11:56:40.000Z
2022-03-22T08:00:59.000Z
tfsnippet/layers/convolutional/shifted.py
QianLiGui/tfsnippet
63adaf04d2ffff8dec299623627d55d4bacac598
[ "MIT" ]
39
2018-07-04T12:40:53.000Z
2022-02-09T23:48:44.000Z
tfsnippet/layers/convolutional/shifted.py
QianLiGui/tfsnippet
63adaf04d2ffff8dec299623627d55d4bacac598
[ "MIT" ]
34
2018-06-25T09:59:22.000Z
2022-02-23T12:46:33.000Z
import tensorflow as tf from tensorflow.contrib.framework import add_arg_scope from tfsnippet.utils import (add_name_and_scope_arg_doc, get_static_shape, get_default_scope_name) from .conv2d_ import conv2d from .utils import validate_conv2d_size_tuple, validate_conv2d_input __all__ = ['shifted_conv2d'] @add_arg_scope @add_name_and_scope_arg_doc def shifted_conv2d(input, out_channels, kernel_size, spatial_shift, strides=(1, 1), channels_last=True, conv_fn=conv2d, name=None, scope=None, **kwargs): """ 2D convolution with shifted input. This method first pads `input` according to the `kernel_size` and `spatial_shift` arguments, then do 2D convolution (using `conv_fn`) with "VALID" padding. Args: input (Tensor): The input tensor, at least 4-d. out_channels (int): The channel numbers of the output. kernel_size (int or (int, int)): Kernel size over spatial dimensions. spatial_shift: The `spatial_shift` should be a tuple with two elements (corresponding to height and width spatial axes), and the elements can only be -1, 0 or 1. If the shift for a specific axis is `-1`, then `kernel_size - 1` zeros will be padded at the end of that axis. If the shift is `0`, then `(kernel_size - 1) // 2` zeros will be padded at the front, and `kernel_size // 2` zeros will be padded at the end that axis. Otherwise if the shift is `1`, then `kernel_size + 1` zeros will be padded at the front of that axis. strides (int or (int, int)): Strides over spatial dimensions. channels_last (bool): Whether or not the channel axis is the last axis in `input`? (i.e., the data format is "NHWC") conv_fn: The 2D convolution function. (default :func:`conv2d`) \\**kwargs: Other named parameters passed to `conv_fn`. Returns: tf.Tensor: The output tensor. """ spatial_shift = tuple(spatial_shift) if len(spatial_shift) != 2 or \ any(s not in (-1, 0, 1) for s in spatial_shift): raise TypeError('`spatial_shift` must be a tuple with two elements, ' 'and the elements can only be -1, 0 or 1.') kernel_size = validate_conv2d_size_tuple('kernel_size', kernel_size) if 'padding' in kwargs: raise ValueError('`padding` argument is not supported.') input, _, _ = validate_conv2d_input(input, channels_last=channels_last) rank = len(get_static_shape(input)) pads = [(0, 0)] * rank is_shifted_conv2d = False spatial_start = -3 if channels_last else -2 for i, (ksize, shift) in enumerate(zip(kernel_size, spatial_shift)): axis = i + spatial_start if shift == 0: pads[axis] = ((ksize - 1) // 2, ksize // 2) elif shift == -1: pads[axis] = (0, ksize - 1) is_shifted_conv2d = True else: assert(shift == 1) pads[axis] = (ksize - 1, 0) is_shifted_conv2d = True # fast routine: no shift, use ordinary conv_fn with padding == 'SAME' if not is_shifted_conv2d: return conv_fn( input=input, out_channels=out_channels, kernel_size=kernel_size, strides=strides, channels_last=channels_last, padding='SAME', scope=scope, name=name, **kwargs ) # slow routine: pad and use conv_fn with padding == 'VALID' with tf.variable_scope(scope, default_name=name or 'shifted_conv2d'): output = tf.pad(input, pads) output = conv_fn( input=output, out_channels=out_channels, kernel_size=kernel_size, strides=strides, channels_last=channels_last, padding='VALID', scope=get_default_scope_name( getattr(conv_fn, '__name__', None) or 'conv_fn'), **kwargs ) return output
38.099099
78
0.594703
import tensorflow as tf from tensorflow.contrib.framework import add_arg_scope from tfsnippet.utils import (add_name_and_scope_arg_doc, get_static_shape, get_default_scope_name) from .conv2d_ import conv2d from .utils import validate_conv2d_size_tuple, validate_conv2d_input __all__ = ['shifted_conv2d'] @add_arg_scope @add_name_and_scope_arg_doc def shifted_conv2d(input, out_channels, kernel_size, spatial_shift, strides=(1, 1), channels_last=True, conv_fn=conv2d, name=None, scope=None, **kwargs): """ 2D convolution with shifted input. This method first pads `input` according to the `kernel_size` and `spatial_shift` arguments, then do 2D convolution (using `conv_fn`) with "VALID" padding. Args: input (Tensor): The input tensor, at least 4-d. out_channels (int): The channel numbers of the output. kernel_size (int or (int, int)): Kernel size over spatial dimensions. spatial_shift: The `spatial_shift` should be a tuple with two elements (corresponding to height and width spatial axes), and the elements can only be -1, 0 or 1. If the shift for a specific axis is `-1`, then `kernel_size - 1` zeros will be padded at the end of that axis. If the shift is `0`, then `(kernel_size - 1) // 2` zeros will be padded at the front, and `kernel_size // 2` zeros will be padded at the end that axis. Otherwise if the shift is `1`, then `kernel_size + 1` zeros will be padded at the front of that axis. strides (int or (int, int)): Strides over spatial dimensions. channels_last (bool): Whether or not the channel axis is the last axis in `input`? (i.e., the data format is "NHWC") conv_fn: The 2D convolution function. (default :func:`conv2d`) \\**kwargs: Other named parameters passed to `conv_fn`. Returns: tf.Tensor: The output tensor. """ spatial_shift = tuple(spatial_shift) if len(spatial_shift) != 2 or \ any(s not in (-1, 0, 1) for s in spatial_shift): raise TypeError('`spatial_shift` must be a tuple with two elements, ' 'and the elements can only be -1, 0 or 1.') kernel_size = validate_conv2d_size_tuple('kernel_size', kernel_size) if 'padding' in kwargs: raise ValueError('`padding` argument is not supported.') input, _, _ = validate_conv2d_input(input, channels_last=channels_last) rank = len(get_static_shape(input)) pads = [(0, 0)] * rank is_shifted_conv2d = False spatial_start = -3 if channels_last else -2 for i, (ksize, shift) in enumerate(zip(kernel_size, spatial_shift)): axis = i + spatial_start if shift == 0: pads[axis] = ((ksize - 1) // 2, ksize // 2) elif shift == -1: pads[axis] = (0, ksize - 1) is_shifted_conv2d = True else: assert(shift == 1) pads[axis] = (ksize - 1, 0) is_shifted_conv2d = True # fast routine: no shift, use ordinary conv_fn with padding == 'SAME' if not is_shifted_conv2d: return conv_fn( input=input, out_channels=out_channels, kernel_size=kernel_size, strides=strides, channels_last=channels_last, padding='SAME', scope=scope, name=name, **kwargs ) # slow routine: pad and use conv_fn with padding == 'VALID' with tf.variable_scope(scope, default_name=name or 'shifted_conv2d'): output = tf.pad(input, pads) output = conv_fn( input=output, out_channels=out_channels, kernel_size=kernel_size, strides=strides, channels_last=channels_last, padding='VALID', scope=get_default_scope_name( getattr(conv_fn, '__name__', None) or 'conv_fn'), **kwargs ) return output
0
0
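The docstring and loop in the record above encode one rule: how much zero padding each spatial axis receives for a shift of -1, 0, or 1. A minimal plain-Python sketch of just that rule (no TensorFlow), following the code path rather than the docstring where the two disagree (for shift 1 the code pads kernel_size - 1 zeros, not kernel_size + 1):

def shift_pads(kernel_size, spatial_shift):
    """(front, end) zero padding per spatial axis, as in shifted_conv2d."""
    pads = []
    for ksize, shift in zip(kernel_size, spatial_shift):
        if shift == 0:
            pads.append(((ksize - 1) // 2, ksize // 2))  # centered, "SAME"-like
        elif shift == -1:
            pads.append((0, ksize - 1))                  # pad only at the end
        else:  # shift == 1
            pads.append((ksize - 1, 0))                  # pad only at the front
    return pads

print(shift_pads((3, 3), (1, 0)))  # [(2, 0), (1, 1)]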
0a1e0a42a99ee6178be40181a0dbd0ab8c1ffad0
1,651
py
Python
ietf/meeting/feeds.py
unofficial-mirror/ietfdb
ce54adb30dc7299c6eb4d42b9aa9d2c2929c1a81
[ "BSD-3-Clause" ]
null
null
null
ietf/meeting/feeds.py
unofficial-mirror/ietfdb
ce54adb30dc7299c6eb4d42b9aa9d2c2929c1a81
[ "BSD-3-Clause" ]
null
null
null
ietf/meeting/feeds.py
unofficial-mirror/ietfdb
ce54adb30dc7299c6eb4d42b9aa9d2c2929c1a81
[ "BSD-3-Clause" ]
null
null
null
# Copyright The IETF Trust 2007-2019, All Rights Reserved # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals import os from django.contrib.syndication.views import Feed from django.utils.feedgenerator import Atom1Feed from django.conf import settings from django.utils.html import escape from ietf.doc.models import Document class LatestMeetingMaterialFeed(Feed): feed_type = Atom1Feed link = "/meeting/" language = "en" base_url = "https://www.ietf.org/proceedings/" def items(self): objs = [] # FIXME: why aren't other materials types in here? for doc in Document.objects.filter(type__in=("agenda", "minutes", "slides")).order_by('-time')[:60]: obj = dict( title=doc.type_id, group_acronym=doc.name.split("-")[2], date=doc.time, # FIXME: why isn't this using gref or href? link=self.base_url + os.path.join(doc.get_file_path(), doc.uploaded_filename)[len(settings.AGENDA_PATH):], author="" ) objs.append(obj) return objs def title(self, obj): return "Meeting Materials Activity" def item_title(self, item): return "%s: %s" % (item["group_acronym"], escape(item["title"])) def item_description(self, item): return "" def item_link(self, item): return item['link'] def item_pubdate(self, item): return item['date'] def item_author_name(self, item): return item['author'] def item_author_email(self, item): return None
28.465517
122
0.623864
# Copyright The IETF Trust 2007-2019, All Rights Reserved # -*- coding: utf-8 -*- from __future__ import absolute_import, print_function, unicode_literals import os from django.contrib.syndication.views import Feed from django.utils.feedgenerator import Atom1Feed from django.conf import settings from django.utils.html import escape from ietf.doc.models import Document class LatestMeetingMaterialFeed(Feed): feed_type = Atom1Feed link = "/meeting/" language = "en" base_url = "https://www.ietf.org/proceedings/" def items(self): objs = [] # FIXME: why aren't other materials types in here? for doc in Document.objects.filter(type__in=("agenda", "minutes", "slides")).order_by('-time')[:60]: obj = dict( title=doc.type_id, group_acronym=doc.name.split("-")[2], date=doc.time, # FIXME: why isn't this using gref or href? link=self.base_url + os.path.join(doc.get_file_path(), doc.uploaded_filename)[len(settings.AGENDA_PATH):], author="" ) objs.append(obj) return objs def title(self, obj): return "Meeting Materials Activity" def item_title(self, item): return "%s: %s" % (item["group_acronym"], escape(item["title"])) def item_description(self, item): return "" def item_link(self, item): return item['link'] def item_pubdate(self, item): return item['date'] def item_author_name(self, item): return item['author'] def item_author_email(self, item): return None
0
0
2319e91137f9efd7f01e9d5c2b0581591fd6d9c7
3,560
py
Python
Controller/DatabaseManager.py
TheConstructRIT/Machine-Swipe-System
857e5c5205638a212736d58ac9e1ae27fa300946
[ "MIT" ]
null
null
null
Controller/DatabaseManager.py
TheConstructRIT/Machine-Swipe-System
857e5c5205638a212736d58ac9e1ae27fa300946
[ "MIT" ]
null
null
null
Controller/DatabaseManager.py
TheConstructRIT/Machine-Swipe-System
857e5c5205638a212736d58ac9e1ae27fa300946
[ "MIT" ]
null
null
null
""" Zachary Cook Manages calls to the databases. """ import sqlite3 from Controller import ConfigurationManager from Model import Time,User """ Class representing the database. """ class DatabaseManager: """ Creates a database manager. """ def __init__(self,location="database.sqlite"): self.database = sqlite3.connect(location,check_same_thread=False) # Initialize the database. self.initializeTables() self.closeOldSessions() """ Initializes the tables if they aren't defined. """ def initializeTables(self): # Initialize the users table. try: self.database.execute("CREATE TABLE Users (Id char(9),AccessType STRING);") self.database.commit() except: pass # Initialize the users table. try: self.database.execute("CREATE TABLE Sessions (Id char(9),StartTime BIGINT,EndTime BIGINT);") self.database.commit() except: pass """ Marks open sessions with a finish time of -1. This should only happen if there was power-lose during the operation of the system. """ def closeOldSessions(self): self.database.execute("UPDATE Sessions SET EndTime = -1 WHERE EndTime = 0;") self.database.commit() """ Returns the type of user. """ def getUserAccessType(self,id): # Return the first result if it exists. results = self.database.execute("SELECT AccessType FROM Users WHERE Id = ?;",[id]).fetchall() if len(results) > 0: return results[0][0] # Return UNAUTHORIZED if there was no result. return "UNAUTHORIZED" """ Sets the access type of a user. """ def setUserAccessType(self,id,accessType): # If the access type is unauthorized, remove the user. if accessType == "UNAUTHORIZED": self.database.execute("DELETE FROM Users WHERE Id = ?;",[id]) self.database.commit() return # Add or update the type if a record exists. if len(self.database.execute("SELECT * FROM Users WHERE Id = ?",[id]).fetchall()) > 0: self.database.execute("UPDATE Users SET AccessType = ? WHERE Id = ?;",[accessType,id]) else: self.database.execute("INSERT INTO Users VALUES (?,?);",[id,accessType]) self.database.commit() """ Logs the session starting. """ def sessionStarted(self,session): self.database.execute("INSERT INTO Sessions VALUES (?,?,0);",[session.getUser().getId(),session.getStartTime()]) self.database.commit() """ Logs the session ending. """ def sessionEnded(self,session): self.database.execute("UPDATE Sessions SET EndTime = ? WHERE EndTime = 0 AND Id = ? AND StartTime = ?;",[Time.getCurrentTimestamp(),session.getUser().getId(),session.getStartTime()]) self.database.commit() staticDatabaseManager = None """ Returns the static database instance. """ def getDatabase(): # Create the static instance. global staticDatabaseManager if staticDatabaseManager is None: staticDatabaseManager = DatabaseManager() # Return the static database. return staticDatabaseManager """ Returns the User for the given id (non-hash). If there is no registered User, None is returned. """ def getUser(id): accessType = getDatabase().getUserAccessType(id) if accessType == "UNAUTHORIZED": return User.User(id,0,accessType) else: return User.User(id,ConfigurationManager.getDefaultSessionTime(),accessType) """ Sets the access type of a user. """ def setUserAccessType(id,accessType): getDatabase().setUserAccessType(id,accessType) """ Registers a session being started. """ def sessionStarted(session): getDatabase().sessionStarted(session) """ Registers a session ended. """ def sessionEnded(session): getDatabase().sessionEnded(session)
25.248227
184
0.72191
""" Zachary Cook Manages calls to the databases. """ import sqlite3 from Controller import ConfigurationManager from Model import Time,User """ Class representing the database. """ class DatabaseManager: """ Creates a database manager. """ def __init__(self,location="database.sqlite"): self.database = sqlite3.connect(location,check_same_thread=False) # Initialize the database. self.initializeTables() self.closeOldSessions() """ Initializes the tables if they aren't defined. """ def initializeTables(self): # Initialize the users table. try: self.database.execute("CREATE TABLE Users (Id char(9),AccessType STRING);") self.database.commit() except: pass # Initialize the users table. try: self.database.execute("CREATE TABLE Sessions (Id char(9),StartTime BIGINT,EndTime BIGINT);") self.database.commit() except: pass """ Marks open sessions with a finish time of -1. This should only happen if there was power-lose during the operation of the system. """ def closeOldSessions(self): self.database.execute("UPDATE Sessions SET EndTime = -1 WHERE EndTime = 0;") self.database.commit() """ Returns the type of user. """ def getUserAccessType(self,id): # Return the first result if it exists. results = self.database.execute("SELECT AccessType FROM Users WHERE Id = ?;",[id]).fetchall() if len(results) > 0: return results[0][0] # Return UNAUTHORIZED if there was no result. return "UNAUTHORIZED" """ Sets the access type of a user. """ def setUserAccessType(self,id,accessType): # If the access type is unauthorized, remove the user. if accessType == "UNAUTHORIZED": self.database.execute("DELETE FROM Users WHERE Id = ?;",[id]) self.database.commit() return # Add or update the type if a record exists. if len(self.database.execute("SELECT * FROM Users WHERE Id = ?",[id]).fetchall()) > 0: self.database.execute("UPDATE Users SET AccessType = ? WHERE Id = ?;",[accessType,id]) else: self.database.execute("INSERT INTO Users VALUES (?,?);",[id,accessType]) self.database.commit() """ Logs the session starting. """ def sessionStarted(self,session): self.database.execute("INSERT INTO Sessions VALUES (?,?,0);",[session.getUser().getId(),session.getStartTime()]) self.database.commit() """ Logs the session ending. """ def sessionEnded(self,session): self.database.execute("UPDATE Sessions SET EndTime = ? WHERE EndTime = 0 AND Id = ? AND StartTime = ?;",[Time.getCurrentTimestamp(),session.getUser().getId(),session.getStartTime()]) self.database.commit() staticDatabaseManager = None """ Returns the static database instance. """ def getDatabase(): # Create the static instance. global staticDatabaseManager if staticDatabaseManager is None: staticDatabaseManager = DatabaseManager() # Return the static database. return staticDatabaseManager """ Returns the User for the given id (non-hash). If there is no registered User, None is returned. """ def getUser(id): accessType = getDatabase().getUserAccessType(id) if accessType == "UNAUTHORIZED": return User.User(id,0,accessType) else: return User.User(id,ConfigurationManager.getDefaultSessionTime(),accessType) """ Sets the access type of a user. """ def setUserAccessType(id,accessType): getDatabase().setUserAccessType(id,accessType) """ Registers a session being started. """ def sessionStarted(session): getDatabase().sessionStarted(session) """ Registers a session ended. """ def sessionEnded(session): getDatabase().sessionEnded(session)
0
0
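The module-level helpers in the record above wrap a lazily created singleton: the first call builds the shared DatabaseManager, later calls reuse it. A short usage sketch, assuming the project's own package layout (Controller.DatabaseManager) is importable:

from Controller import DatabaseManager

# First call constructs the singleton and the sqlite tables; later calls reuse it.
DatabaseManager.setUserAccessType("123456789", "AUTHORIZED")
user = DatabaseManager.getUser("123456789")  # User with the default session time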
ca15d3a39d10b75749c3989ddaa269a5e7fa2f8d
1,739
py
Python
src/hacenada/abstract.py
corydodt/Hacenada
32421d071bf684e56e629c84cfb6d585be5be846
[ "MIT" ]
1
2021-03-23T10:21:27.000Z
2021-03-23T10:21:27.000Z
src/hacenada/abstract.py
corydodt/Hacenada
32421d071bf684e56e629c84cfb6d585be5be846
[ "MIT" ]
1
2021-01-15T04:12:12.000Z
2021-01-15T04:12:12.000Z
src/hacenada/abstract.py
corydodt/Hacenada
32421d071bf684e56e629c84cfb6d585be5be846
[ "MIT" ]
null
null
null
""" Abstract types """ from abc import ABC, abstractmethod import typing from hacenada.const import STR_DICT class SessionStorage(ABC): """ Provide access to the session's underlying storage through any mechanism """ answer: typing.Any meta: typing.Any @property def script_path(self): """ The path to the script associated with this storage Concrete method, implementing this is optional """ @script_path.setter def script_path(self, value): """ Set the path to the script associated with this storage Concrete method, implementing this is optional """ @property # type: ignore @abstractmethod def description(self): """ A description of this hacenada session """ @description.setter # type: ignore @abstractmethod def description(self, val): """ Set the description """ @abstractmethod def save_answer(self, answer: STR_DICT): """ Save a single answer """ @abstractmethod def update_meta(self, **kw): """ Update meta properties based on keywords (e.g. description="hello world") """ @abstractmethod def get_answer(self, label: str): """ Look up a single answer by str """ def drop(self): """ Delete the storage Concrete method, implementing this is optional """ class Render(ABC): """ Rendering operations for question types """ @abstractmethod def render(self, step, context) -> STR_DICT: """ Output a question to a device, should return a 0-item label:value dict """
20.702381
81
0.592869
""" Abstract types """ from abc import ABC, abstractmethod import typing from hacenada.const import STR_DICT class SessionStorage(ABC): """ Provide access to the session's underlying storage through any mechanism """ answer: typing.Any meta: typing.Any @property def script_path(self): """ The path to the script associated with this storage Concrete method, implementing this is optional """ @script_path.setter def script_path(self, value): """ Set the path to the script associated with this storage Concrete method, implementing this is optional """ @property # type: ignore @abstractmethod def description(self): """ A description of this hacenada session """ @description.setter # type: ignore @abstractmethod def description(self, val): """ Set the description """ @abstractmethod def save_answer(self, answer: STR_DICT): """ Save a single answer """ @abstractmethod def update_meta(self, **kw): """ Update meta properties based on keywords (e.g. description="hello world") """ @abstractmethod def get_answer(self, label: str): """ Look up a single answer by str """ def drop(self): """ Delete the storage Concrete method, implementing this is optional """ class Render(ABC): """ Rendering operations for question types """ @abstractmethod def render(self, step, context) -> STR_DICT: """ Output a question to a device, should return a 0-item label:value dict """
0
0
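SessionStorage in the record above leaves four abstract members to fill in (the description property and its setter, save_answer, update_meta, get_answer); script_path and drop are concrete and optional. A minimal sketch of a concrete subclass, assuming an in-memory dict is an acceptable backing store (hacenada's own storage may differ):

from hacenada.abstract import SessionStorage

class MemoryStorage(SessionStorage):
    """Keep answers and metadata in plain dicts."""

    def __init__(self):
        self._answers, self._meta = {}, {}

    @property
    def description(self):
        return self._meta.get("description", "")

    @description.setter
    def description(self, val):
        self._meta["description"] = val

    def save_answer(self, answer):
        self._answers.update(answer)  # answer is a label: value dict

    def update_meta(self, **kw):
        self._meta.update(kw)

    def get_answer(self, label):
        return self._answers.get(label)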
4a5f5f16ca4b191cb32bb78237a6ad6567772ab5
6,965
py
Python
compounddb/tools.py
gitanna/chemminetools
1cfef18bcd773421c95f8662857f31e363211cdc
[ "BSD-4-Clause-UC" ]
2
2017-12-11T23:17:40.000Z
2020-08-17T08:35:01.000Z
compounddb/tools.py
gitanna/chemminetools
1cfef18bcd773421c95f8662857f31e363211cdc
[ "BSD-4-Clause-UC" ]
null
null
null
compounddb/tools.py
gitanna/chemminetools
1cfef18bcd773421c95f8662857f31e363211cdc
[ "BSD-4-Clause-UC" ]
null
null
null
#!/usr/bin/python # -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from logging import root, basicConfig import openbabel import sys import re import tempfile import os import codecs import md5 import compounddb.sdfiterator import string import random cur_dir = os.path.dirname(__file__) from compounddb.models import * basicConfig() inchiconv = openbabel.OBConversion() ######################### # sdf-related processings ######################### def get_sdf_tags(sdf): """parse the sdf tags""" tag_pattern = re.compile(""">\s+<([^>]+)>[^ ]* ([^>$]+)""") tags = tag_pattern.findall(sdf) tagdict = dict() # process each tag for (name, value) in tags: tagdict[name.strip()] = value.strip() return tagdict def parse_annotation(sdf, namekey): """ parse annotation from SDF file """ # parse the sdf tags moldata = get_sdf_tags(sdf) # --- inchi inchiconv.SetInAndOutFormats('sdf', 'Inchi') mol = openbabel.OBMol() res = inchiconv.ReadString(mol, codecs.encode(sdf, 'utf-8')) if mol.Empty(): root.warning(' --> ERROR on sdf') raise Exception # standard data generated # --- inchi/formula/weight moldata['inchi'] = inchiconv.WriteString(mol).strip() moldata['formula'] = mol.GetFormula() moldata['id'] = mol.GetTitle() if moldata['id'] == '': moldata['id'] = 'unspecified_' \ + ''.join(random.sample(string.digits, 6)) mol.AddHydrogens() moldata['weight'] = str(mol.GetMolWt()) # if the name is not in sdf: if not moldata.has_key(namekey): moldata[namekey] = '' # smiles inchiconv.SetInAndOutFormats('sdf', 'smi') mol = openbabel.OBMol() res = inchiconv.ReadString(mol, codecs.encode(sdf, 'utf-8')) if mol.Empty(): root.warning(' --> ERROR on sdf') raise Exception moldata['smiles'] = inchiconv.WriteString(mol).strip() return moldata ############################ # single compound operations ############################ def _update_single_compound( moldata, sdf, library, nameky, idkey, ): # sdf file s = SDFFile(sdffile=sdf) s.save() sdfid = s.id def insert_single_compound( moldata, sdf, namekey, idkey, user, ): """ insert single compound into database """ cid = moldata[idkey] name = moldata[namekey] if '\n' in name: name = name.split('\n')[0] # compound c = Compound( cid=cid, name=name, formula=moldata['formula'], weight=moldata['weight'], inchi=moldata['inchi'], smiles=moldata['smiles'], user=user, ) # sdf_file=s) c.save() c_id = c.id root.warning(' -->new compound inserted: c_id=%s, cid=%s' % (c_id, cid)) # sdf file s = SDFFile(sdffile=sdf, compound=c) s.save() sdfid = s.id return c.id ##################################### # Physical Chemical Property - JOELib ##################################### def gen_joelib_property(sdf): """run and parse the property output """ # save the input in FS t = tempfile.NamedTemporaryFile(suffix='.sdf') t.write(codecs.encode(sdf, 'utf-8')) t.flush() # prepare the output file (f, out) = tempfile.mkstemp(suffix='.sdf') os.close(f) # convert cmd = \ """JAVA_HOME=/opt/jre/ JOELIB2=/opt/JOELib2-alpha-20070303/ /opt/JOELib2-alpha-20070303/moleculeConversion.sh +d +h -iSDF -osdf "%s" "%s" > /dev/null""" \ % (t.name, out) root.warning(' --> running:%s' % cmd) if os.system(cmd) != 0: os.unlink(out) raise 'cannot run JOELib' # read and parse f = file(out) tags = get_sdf_tags(codecs.decode(f.read(), 'utf-8')) f.close() # clean os.unlink(out) return tags ###### # MISC ###### def update_mw( lib_name, lib_ver, input, rev=False, ): """goal: to update MW value with hydrogen added .... when calculating JOELib .... 'rev': in ChemMineV2, some libraries got compound ID and compound name switched, like 'Aurora'""" import datetime begin = datetime.datetime.now() print 'starts at: %s' % begin library = get_library(lib_name, lib_ver) mw = PropertyField.objects.get(name='MW') fp = file(input) line1 = fp.readline() count = 1 for line in fp: (cid, weight) = line.strip().split('\t') try: if rev: c = Compound.objects.get(library=library, name=cid) else: c = Compound.objects.get(library=library, cid=cid) except Compound.DoesNotExist: print 'not found: line %s, cid=%s' % (count, cid) pass try: p = Property.objects.get(compound=c, field=mw) p.value = weight p.save() except Property.DoesNotExist: p = Property(field=mw, compound=c, value=weight) p.save() print 'new p for %s, line %s' % (cid, count) except: print '----->line %s, cid=%s' % (count, cid) pass count += 1 # print "%s: %s -> %s", (cid, old, weight) fp.close() end = datetime.datetime.now() print 'ends at: %s' % end return def del_duplicate_mw(lib_name, lib_ver): """some libraries has 2 mw """ library = get_library(lib_name, lib_ver) mw = PropertyField.objects.get(name='MW') for c in library.compound_set.all(): if c.property_set.filter(field=mw).count() == 2: c.property_set.filter(field=mw)[1].delete() return def fix_kegg_cid(): """some cid in KEGG still has '(noMol)', fix them""" library = get_library('KEGG', 0) count = 0 for c in library.compound_set.all(): if '(noMol)' in c.cid: old = c.cid print old c.cid = old.strip('(noMol)') c.save() count += 1 print '%s compounds updated with new cid' % count return def format_sdf_for_qsar(sdffile, output, ID_tag): """Cerius2 uses 1st line in SDF as ID tag .... some sdf has blank 1st line, so we need to format SDF .... by filling cid to 1st line in SDF""" fp = file(output, 'w') for sdf in sdfiterator.sdf_iter(sdffile): tagdict = get_sdf_tags(sdf) cid = tagdict[ID_tag] fp.write('%s\n' % cid) fp.write(sdf.split('\n', 1)[1].split('M END')[0]) fp.write('M END\n') fp.write('''> <%s> %s ''' % (ID_tag, cid)) fp.write('$$$$\n') fp.close() return def list_all_cid_from_sdf(sdffile, ID_tag, outfile): fp = file(outfile, 'w') for sdf in sdfiterator.sdf_iter(sdffile): tagdict = get_sdf_tags(sdf) cid = tagdict[ID_tag] fp.write('%s\n' % cid) fp.close() return
21.902516
162
0.560086
#!/usr/bin/python # -*- coding: utf-8 -*- from django.db import models from django.contrib.auth.models import User from logging import root, basicConfig import openbabel import sys import re import tempfile import os import codecs import md5 import compounddb.sdfiterator import string import random cur_dir = os.path.dirname(__file__) from compounddb.models import * basicConfig() inchiconv = openbabel.OBConversion() ######################### # sdf-related processings ######################### def get_sdf_tags(sdf): """parse the sdf tags""" tag_pattern = re.compile(""">\s+<([^>]+)>[^ ]* ([^>$]+)""") tags = tag_pattern.findall(sdf) tagdict = dict() # process each tag for (name, value) in tags: tagdict[name.strip()] = value.strip() return tagdict def parse_annotation(sdf, namekey): """ parse annotation from SDF file """ # parse the sdf tags moldata = get_sdf_tags(sdf) # --- inchi inchiconv.SetInAndOutFormats('sdf', 'Inchi') mol = openbabel.OBMol() res = inchiconv.ReadString(mol, codecs.encode(sdf, 'utf-8')) if mol.Empty(): root.warning(' --> ERROR on sdf') raise Exception # standard data generated # --- inchi/formula/weight moldata['inchi'] = inchiconv.WriteString(mol).strip() moldata['formula'] = mol.GetFormula() moldata['id'] = mol.GetTitle() if moldata['id'] == '': moldata['id'] = 'unspecified_' \ + ''.join(random.sample(string.digits, 6)) mol.AddHydrogens() moldata['weight'] = str(mol.GetMolWt()) # if the name is not in sdf: if not moldata.has_key(namekey): moldata[namekey] = '' # smiles inchiconv.SetInAndOutFormats('sdf', 'smi') mol = openbabel.OBMol() res = inchiconv.ReadString(mol, codecs.encode(sdf, 'utf-8')) if mol.Empty(): root.warning(' --> ERROR on sdf') raise Exception moldata['smiles'] = inchiconv.WriteString(mol).strip() return moldata ############################ # single compound operations ############################ def _update_single_compound( moldata, sdf, library, nameky, idkey, ): # sdf file s = SDFFile(sdffile=sdf) s.save() sdfid = s.id def insert_single_compound( moldata, sdf, namekey, idkey, user, ): """ insert single compound into database """ cid = moldata[idkey] name = moldata[namekey] if '\n' in name: name = name.split('\n')[0] # compound c = Compound( cid=cid, name=name, formula=moldata['formula'], weight=moldata['weight'], inchi=moldata['inchi'], smiles=moldata['smiles'], user=user, ) # sdf_file=s) c.save() c_id = c.id root.warning(' -->new compound inserted: c_id=%s, cid=%s' % (c_id, cid)) # sdf file s = SDFFile(sdffile=sdf, compound=c) s.save() sdfid = s.id return c.id ##################################### # Physical Chemical Property - JOELib ##################################### def gen_joelib_property(sdf): """run and parse the property output """ # save the input in FS t = tempfile.NamedTemporaryFile(suffix='.sdf') t.write(codecs.encode(sdf, 'utf-8')) t.flush() # prepare the output file (f, out) = tempfile.mkstemp(suffix='.sdf') os.close(f) # convert cmd = \ """JAVA_HOME=/opt/jre/ JOELIB2=/opt/JOELib2-alpha-20070303/ /opt/JOELib2-alpha-20070303/moleculeConversion.sh +d +h -iSDF -osdf "%s" "%s" > /dev/null""" \ % (t.name, out) root.warning(' --> running:%s' % cmd) if os.system(cmd) != 0: os.unlink(out) raise 'cannot run JOELib' # read and parse f = file(out) tags = get_sdf_tags(codecs.decode(f.read(), 'utf-8')) f.close() # clean os.unlink(out) return tags ###### # MISC ###### def update_mw( lib_name, lib_ver, input, rev=False, ): """goal: to update MW value with hydrogen added .... when calculating JOELib .... 'rev': in ChemMineV2, some libraries got compound ID and compound name switched, like 'Aurora'""" import datetime begin = datetime.datetime.now() print 'starts at: %s' % begin library = get_library(lib_name, lib_ver) mw = PropertyField.objects.get(name='MW') fp = file(input) line1 = fp.readline() count = 1 for line in fp: (cid, weight) = line.strip().split('\t') try: if rev: c = Compound.objects.get(library=library, name=cid) else: c = Compound.objects.get(library=library, cid=cid) except Compound.DoesNotExist: print 'not found: line %s, cid=%s' % (count, cid) pass try: p = Property.objects.get(compound=c, field=mw) p.value = weight p.save() except Property.DoesNotExist: p = Property(field=mw, compound=c, value=weight) p.save() print 'new p for %s, line %s' % (cid, count) except: print '----->line %s, cid=%s' % (count, cid) pass count += 1 # print "%s: %s -> %s", (cid, old, weight) fp.close() end = datetime.datetime.now() print 'ends at: %s' % end return def del_duplicate_mw(lib_name, lib_ver): """some libraries has 2 mw """ library = get_library(lib_name, lib_ver) mw = PropertyField.objects.get(name='MW') for c in library.compound_set.all(): if c.property_set.filter(field=mw).count() == 2: c.property_set.filter(field=mw)[1].delete() return def fix_kegg_cid(): """some cid in KEGG still has '(noMol)', fix them""" library = get_library('KEGG', 0) count = 0 for c in library.compound_set.all(): if '(noMol)' in c.cid: old = c.cid print old c.cid = old.strip('(noMol)') c.save() count += 1 print '%s compounds updated with new cid' % count return def format_sdf_for_qsar(sdffile, output, ID_tag): """Cerius2 uses 1st line in SDF as ID tag .... some sdf has blank 1st line, so we need to format SDF .... by filling cid to 1st line in SDF""" fp = file(output, 'w') for sdf in sdfiterator.sdf_iter(sdffile): tagdict = get_sdf_tags(sdf) cid = tagdict[ID_tag] fp.write('%s\n' % cid) fp.write(sdf.split('\n', 1)[1].split('M END')[0]) fp.write('M END\n') fp.write('''> <%s> %s ''' % (ID_tag, cid)) fp.write('$$$$\n') fp.close() return def list_all_cid_from_sdf(sdffile, ID_tag, outfile): fp = file(outfile, 'w') for sdf in sdfiterator.sdf_iter(sdffile): tagdict = get_sdf_tags(sdf) cid = tagdict[ID_tag] fp.write('%s\n' % cid) fp.close() return
0
0
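The module in the record above is Python 2 (print statements, file(), dict.has_key, the md5 module). As a point of reference, a minimal Python 3 rendering of its SDF tag parser, unchanged in logic; the literal newline inside the original triple-quoted pattern is written here as \n:

import re

# Matches a '> <NAME>' data header, then the tag's value on the following line(s).
TAG_PATTERN = re.compile(r">\s+<([^>]+)>[^ ]*\n([^>$]+)")

def get_sdf_tags(sdf):
    """Parse the data tags of one SDF record into a dict."""
    return {name.strip(): value.strip() for name, value in TAG_PATTERN.findall(sdf)}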
7333e00acece91b14003ff4ab485dc5fbb7d6ece
725
py
Python
reservation_units/migrations/0040_reservationunit_tax_percentage.py
SuviVappula/tilavarauspalvelu-core
ad7dec36e392a7b2927e2f825c3b0eb29b700793
[ "MIT" ]
null
null
null
reservation_units/migrations/0040_reservationunit_tax_percentage.py
SuviVappula/tilavarauspalvelu-core
ad7dec36e392a7b2927e2f825c3b0eb29b700793
[ "MIT" ]
null
null
null
reservation_units/migrations/0040_reservationunit_tax_percentage.py
SuviVappula/tilavarauspalvelu-core
ad7dec36e392a7b2927e2f825c3b0eb29b700793
[ "MIT" ]
null
null
null
# Generated by Django 3.1.14 on 2021-12-13 11:06 import django.db.models.deletion from django.db import migrations, models import reservation_units.models class Migration(migrations.Migration): dependencies = [ ('reservation_units', '0039_taxpercentage'), ] operations = [ migrations.AddField( model_name='reservationunit', name='tax_percentage', field=models.ForeignKey(default=reservation_units.models.get_default_tax_percentage, help_text='The percentage of tax included in the price', on_delete=django.db.models.deletion.PROTECT, related_name='reservation_units', to='reservation_units.taxpercentage', verbose_name='Tax percentage'), ), ]
32.954545
302
0.721379
# Generated by Django 3.1.14 on 2021-12-13 11:06 import django.db.models.deletion from django.db import migrations, models import reservation_units.models class Migration(migrations.Migration): dependencies = [ ('reservation_units', '0039_taxpercentage'), ] operations = [ migrations.AddField( model_name='reservationunit', name='tax_percentage', field=models.ForeignKey(default=reservation_units.models.get_default_tax_percentage, help_text='The percentage of tax included in the price', on_delete=django.db.models.deletion.PROTECT, related_name='reservation_units', to='reservation_units.taxpercentage', verbose_name='Tax percentage'), ), ]
0
0
a1a9646cdfbe9e115bc034e382782125b3a992b9
569
py
Python
xtapi/__init__.py
istommao/xtapi
acc81493a11adea7f3bb75773bf8683a3ea74b9d
[ "MIT" ]
null
null
null
xtapi/__init__.py
istommao/xtapi
acc81493a11adea7f3bb75773bf8683a3ea74b9d
[ "MIT" ]
null
null
null
xtapi/__init__.py
istommao/xtapi
acc81493a11adea7f3bb75773bf8683a3ea74b9d
[ "MIT" ]
null
null
null
"""xtapi""" from fastapi import ( Query, Path, Body, Cookie, Header, Form, File, UploadFile, Request, Response, status, Depends, APIRouter, HTTPException, BackgroundTasks ) from .main import MainApp from .templates import Templates __all__ = [ 'Query', 'Path', 'Body', 'Cookie', 'Header', 'Form', 'File', 'UploadFile', 'status', 'Request', 'Response', 'Depends', 'APIRouter', 'HTTPException', 'BackgroundTasks', 'MainApp', 'Templates' ]
12.931818
32
0.544815
"""xtapi""" from fastapi import ( Query, Path, Body, Cookie, Header, Form, File, UploadFile, Request, Response, status, Depends, APIRouter, HTTPException, BackgroundTasks ) from .main import MainApp from .templates import Templates __all__ = [ 'Query', 'Path', 'Body', 'Cookie', 'Header', 'Form', 'File', 'UploadFile', 'status', 'Request', 'Response', 'Depends', 'APIRouter', 'HTTPException', 'BackgroundTasks', 'MainApp', 'Templates' ]
0
0
42665268d49b3fed30aab6d47b1012fe77337c56
4,226
py
Python
tensortrade/core/component.py
tttza/tensortrade
bdc83c0ded7281f835ef5cb5bc8f57694bb28e87
[ "Apache-2.0" ]
2
2022-02-07T18:53:50.000Z
2022-02-08T04:19:10.000Z
tensortrade/core/component.py
tttza/tensortrade
bdc83c0ded7281f835ef5cb5bc8f57694bb28e87
[ "Apache-2.0" ]
null
null
null
tensortrade/core/component.py
tttza/tensortrade
bdc83c0ded7281f835ef5cb5bc8f57694bb28e87
[ "Apache-2.0" ]
1
2022-02-07T18:53:31.000Z
2022-02-07T18:53:31.000Z
from abc import ABC, ABCMeta from typing import Any from . import registry from tensortrade.core.context import TradingContext, Context from tensortrade.core.base import Identifiable class InitContextMeta(ABCMeta): """Metaclass that executes `__init__` of instance in its core. This class works with the `TradingContext` class to ensure the correct data is being given to the instance created by a concrete class that has subclassed `Component`. """ def __call__(cls, *args, **kwargs) -> 'InitContextMeta': """ Parameters ---------- args : positional arguments to give constructor of subclass of `Component` kwargs : keyword arguments to give constructor of subclass of `Component` Returns ------- `Component` An instance of a concrete class the subclasses `Component` """ context = TradingContext.get_context() registered_name = registry.registry()[cls] data = context.data.get(registered_name, {}) config = {**context.shared, **data} instance = cls.__new__(cls, *args, **kwargs) setattr(instance, 'context', Context(**config)) instance.__init__(*args, **kwargs) return instance class ContextualizedMixin(object): """A mixin that is to be mixed with any class that must function in a contextual setting. """ @property def context(self) -> Context: """Gets the `Context` the object is under. Returns ------- `Context` The context the object is under. """ return self._context @context.setter def context(self, context: Context) -> None: """Sets the context for the object. Parameters ---------- context : `Context` The context to set for the object. """ self._context = context class Component(ABC, ContextualizedMixin, Identifiable, metaclass=InitContextMeta): """The main class for setting up components to be used in the `TradingEnv`. This class if responsible for providing a common way in which different components of the library can be created. Specifically, it enables the creation of components from a `TradingContext`. Therefore making the creation of complex environments simpler where there are only a few things that need to be changed from case to case. Attributes ---------- registered_name : str The name under which constructor arguments are to be given in a dictionary and passed to a `TradingContext`. """ registered_name = None def __init_subclass__(cls, **kwargs) -> None: """Constructs the concrete subclass of `Component`. In constructing the subclass, the concrete subclass is also registered into the project level registry. Parameters ---------- kwargs : keyword arguments The keyword arguments to be provided to the concrete subclass of `Component` to create an instance. """ super().__init_subclass__(**kwargs) if cls not in registry.registry(): registry.register(cls, cls.registered_name) def default(self, key: str, value: Any, kwargs: dict = None) -> Any: """Resolves which defaults value to use for construction. A concrete subclass will use this method to resolve which default value it should use when creating an instance. The default value should go to the value specified for the variable within the `TradingContext`. If that one is not provided it will resolve to `value`. Parameters ---------- key : str The name of the attribute to be resolved for the class. value : any The `value` the attribute should be set to if not provided in the `TradingContext`. kwargs : dict, optional The dictionary to search through for the value associated with `key`. """ if not kwargs: return self.context.get(key, None) or value return self.context.get(key, None) or kwargs.get(key, value)
32.507692
88
0.635589
from abc import ABC, ABCMeta from typing import Any from . import registry from tensortrade.core.context import TradingContext, Context from tensortrade.core.base import Identifiable class InitContextMeta(ABCMeta): """Metaclass that executes `__init__` of instance in its core. This class works with the `TradingContext` class to ensure the correct data is being given to the instance created by a concrete class that has subclassed `Component`. """ def __call__(cls, *args, **kwargs) -> 'InitContextMeta': """ Parameters ---------- args : positional arguments to give constructor of subclass of `Component` kwargs : keyword arguments to give constructor of subclass of `Component` Returns ------- `Component` An instance of a concrete class the subclasses `Component` """ context = TradingContext.get_context() registered_name = registry.registry()[cls] data = context.data.get(registered_name, {}) config = {**context.shared, **data} instance = cls.__new__(cls, *args, **kwargs) setattr(instance, 'context', Context(**config)) instance.__init__(*args, **kwargs) return instance class ContextualizedMixin(object): """A mixin that is to be mixed with any class that must function in a contextual setting. """ @property def context(self) -> Context: """Gets the `Context` the object is under. Returns ------- `Context` The context the object is under. """ return self._context @context.setter def context(self, context: Context) -> None: """Sets the context for the object. Parameters ---------- context : `Context` The context to set for the object. """ self._context = context class Component(ABC, ContextualizedMixin, Identifiable, metaclass=InitContextMeta): """The main class for setting up components to be used in the `TradingEnv`. This class if responsible for providing a common way in which different components of the library can be created. Specifically, it enables the creation of components from a `TradingContext`. Therefore making the creation of complex environments simpler where there are only a few things that need to be changed from case to case. Attributes ---------- registered_name : str The name under which constructor arguments are to be given in a dictionary and passed to a `TradingContext`. """ registered_name = None def __init_subclass__(cls, **kwargs) -> None: """Constructs the concrete subclass of `Component`. In constructing the subclass, the concrete subclass is also registered into the project level registry. Parameters ---------- kwargs : keyword arguments The keyword arguments to be provided to the concrete subclass of `Component` to create an instance. """ super().__init_subclass__(**kwargs) if cls not in registry.registry(): registry.register(cls, cls.registered_name) def default(self, key: str, value: Any, kwargs: dict = None) -> Any: """Resolves which defaults value to use for construction. A concrete subclass will use this method to resolve which default value it should use when creating an instance. The default value should go to the value specified for the variable within the `TradingContext`. If that one is not provided it will resolve to `value`. Parameters ---------- key : str The name of the attribute to be resolved for the class. value : any The `value` the attribute should be set to if not provided in the `TradingContext`. kwargs : dict, optional The dictionary to search through for the value associated with `key`. """ if not kwargs: return self.context.get(key, None) or value return self.context.get(key, None) or kwargs.get(key, value)
0
0
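The working part of InitContextMeta in the record above is easy to miss: __call__ builds the instance with __new__, attaches a context, and only then invokes __init__ by hand, so every Component can read self.context from inside its own constructor. A standalone sketch of that pattern, with a plain dict standing in for Context(**config):

class InjectMeta(type):
    def __call__(cls, *args, **kwargs):
        # Create the instance first, inject the attribute, then run __init__.
        instance = cls.__new__(cls, *args, **kwargs)
        instance.context = {"shared": True}  # stand-in for Context(**config)
        instance.__init__(*args, **kwargs)
        return instance

class Widget(metaclass=InjectMeta):
    def __init__(self):
        print(self.context)  # already set when __init__ runs

Widget()  # prints {'shared': True}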
360b9d69d4522185d1aa103f09cae5f806771a7a
314
py
Python
sample/fun.py
henryneu/Python
41bdbe73944116dc5bbb27d5770d6f45c20276a5
[ "Apache-2.0" ]
1
2017-03-02T02:59:47.000Z
2017-03-02T02:59:47.000Z
sample/fun.py
henryneu/Python
41bdbe73944116dc5bbb27d5770d6f45c20276a5
[ "Apache-2.0" ]
null
null
null
sample/fun.py
henryneu/Python
41bdbe73944116dc5bbb27d5770d6f45c20276a5
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from math import sqrt from math import sin # functions passed in as arguments # using variable arguments def same(x, *fs): f = [f(x) for f in fs] return f def do_fun(x=[], *fu): fx = [f(x_i) for x_i in x for f in fu] return fx print(same(3, abs, sqrt, sin)) print(do_fun([1, 2, 4, 9], abs, sqrt, sin))
17.444444
43
0.60828
#!/usr/bin/env python3 # -*- coding: utf-8 -*- from math import sqrt from math import sin # functions passed in as arguments # using variable arguments def same(x, *fs): f = [f(x) for f in fs] return f def do_fun(x=[], *fu): fx = [f(x_i) for x_i in x for f in fu] return fx print(same(3, abs, sqrt, sin)) print(do_fun([1, 2, 4, 9], abs, sqrt, sin))
42
0
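The nested comprehension in do_fun above applies every function to every element, elements outermost. A worked call, restating the function so the snippet runs on its own:

from math import sqrt

def do_fun(x=[], *fu):
    return [f(x_i) for x_i in x for f in fu]

print(do_fun([1, 4], abs, sqrt))  # [1, 1.0, 4, 2.0]: abs(1), sqrt(1), abs(4), sqrt(4)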
d503b7c3004d0aec97a03aa45fb4af6a06bd8cfd
1,537
py
Python
stepic_rec_2.py
pis2pis2/stepik_auto_tests_course
5bd5df105848582b58620373055e2f4fc945e5e2
[ "MIT" ]
null
null
null
stepic_rec_2.py
pis2pis2/stepik_auto_tests_course
5bd5df105848582b58620373055e2f4fc945e5e2
[ "MIT" ]
null
null
null
stepic_rec_2.py
pis2pis2/stepik_auto_tests_course
5bd5df105848582b58620373055e2f4fc945e5e2
[ "MIT" ]
null
null
null
from selenium import webdriver import time try: # link = "http://suninjuly.github.io/registration1.html" link = "http://suninjuly.github.io/registration2.html" browser = webdriver.Chrome() browser.get(link) # Your code that fills in the required fields input_first_name = browser.find_element_by_tag_name("input") input_first_name.send_keys("Ivan") input_last_name = browser.find_element_by_css_selector('input[placeholder="Input your last name"]') input_last_name.send_keys("Petrov") input_email = browser.find_element_by_css_selector("[placeholder='Input your email']") input_email.send_keys("test@mail.com") # Submit the completed form button = browser.find_element_by_css_selector("button.btn") button.click() # Check that we managed to register # wait for the page to load time.sleep(3) # find the element containing the text welcome_text_elt = browser.find_element_by_tag_name("h1") # store the text of the welcome_text_elt element in the welcome_text variable welcome_text = welcome_text_elt.text # use assert to check that the expected text matches the text on the site page assert "Congratulations! You have successfully registered!" == welcome_text print("Test finished successfully. 10 seconds before the browser closes...") finally: # pause to visually inspect the results of the script run time.sleep(10) # close the browser after all the manipulations browser.close() time.sleep(2) browser.quit()
37.487805
103
0.745608
from selenium import webdriver import time try: # link = "http://suninjuly.github.io/registration1.html" link = "http://suninjuly.github.io/registration2.html" browser = webdriver.Chrome() browser.get(link) # Your code that fills in the required fields input_first_name = browser.find_element_by_tag_name("input") input_first_name.send_keys("Ivan") input_last_name = browser.find_element_by_css_selector('input[placeholder="Input your last name"]') input_last_name.send_keys("Petrov") input_email = browser.find_element_by_css_selector("[placeholder='Input your email']") input_email.send_keys("test@mail.com") # Submit the completed form button = browser.find_element_by_css_selector("button.btn") button.click() # Check that we managed to register # wait for the page to load time.sleep(3) # find the element containing the text welcome_text_elt = browser.find_element_by_tag_name("h1") # store the text of the welcome_text_elt element in the welcome_text variable welcome_text = welcome_text_elt.text # use assert to check that the expected text matches the text on the site page assert "Congratulations! You have successfully registered!" == welcome_text print("Test finished successfully. 10 seconds before the browser closes...") finally: # pause to visually inspect the results of the script run time.sleep(10) # close the browser after all the manipulations browser.close() time.sleep(2) browser.quit()
768
0
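The find_element_by_* helpers used in the record above were removed in Selenium 4; the same lookups are spelled with By locators. A sketch of the equivalent calls:

from selenium import webdriver
from selenium.webdriver.common.by import By

browser = webdriver.Chrome()
browser.get("http://suninjuly.github.io/registration2.html")
browser.find_element(By.TAG_NAME, "input").send_keys("Ivan")
browser.find_element(By.CSS_SELECTOR, 'input[placeholder="Input your last name"]').send_keys("Petrov")
browser.find_element(By.CSS_SELECTOR, "[placeholder='Input your email']").send_keys("test@mail.com")
browser.find_element(By.CSS_SELECTOR, "button.btn").click()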
a513ba215a5ee45deeea7edac6742335f7fa9630
919
py
Python
stock_rv_ranker/metrics/ranknet.py
Sci-Inference/stock-return-volatility-ranker
96dd865cac146c2cadea369df9552b9698dd05be
[ "Apache-2.0" ]
null
null
null
stock_rv_ranker/metrics/ranknet.py
Sci-Inference/stock-return-volatility-ranker
96dd865cac146c2cadea369df9552b9698dd05be
[ "Apache-2.0" ]
null
null
null
stock_rv_ranker/metrics/ranknet.py
Sci-Inference/stock-return-volatility-ranker
96dd865cac146c2cadea369df9552b9698dd05be
[ "Apache-2.0" ]
1
2022-01-23T06:35:44.000Z
2022-01-23T06:35:44.000Z
import numpy as np # https://gist.github.com/bwhite/3726239 def precision_at_k(r, k): assert k >= 1 r = np.asarray(r)[:k] != 0 if r.size != k: raise ValueError('Relevance score length < k') return np.mean(r) def average_precision(r): r = np.asarray(r) != 0 out = [precision_at_k(r, k + 1) for k in range(r.size) if r[k]] if not out: return 0. return np.mean(out) def mean_average_precision(rs): return np.mean([average_precision(r) for r in rs]) def dcg_at_k(r, k, method=0): r = np.asfarray(r)[:k] if r.size: if method == 0: return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1))) elif method == 1: return np.sum(r / np.log2(np.arange(2, r.size + 2))) else: raise ValueError('method must be 0 or 1.') return 0. def ndcg_at_k(r, k, method=0): dcg_max = dcg_at_k(sorted(r, reverse=True), k, method) if not dcg_max: return 0. return dcg_at_k(r, k, method) / dcg_max
21.372093
66
0.64309
import numpy as np # https://gist.github.com/bwhite/3726239 def precision_at_k(r, k): assert k >= 1 r = np.asarray(r)[:k] != 0 if r.size != k: raise ValueError('Relevance score length < k') return np.mean(r) def average_precision(r): r = np.asarray(r) != 0 out = [precision_at_k(r, k + 1) for k in range(r.size) if r[k]] if not out: return 0. return np.mean(out) def mean_average_precision(rs): return np.mean([average_precision(r) for r in rs]) def dcg_at_k(r, k, method=0): r = np.asfarray(r)[:k] if r.size: if method == 0: return r[0] + np.sum(r[1:] / np.log2(np.arange(2, r.size + 1))) elif method == 1: return np.sum(r / np.log2(np.arange(2, r.size + 2))) else: raise ValueError('method must be 0 or 1.') return 0. def ndcg_at_k(r, k, method=0): dcg_max = dcg_at_k(sorted(r, reverse=True), k, method) if not dcg_max: return 0. return dcg_at_k(r, k, method) / dcg_max
0
0
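A hand-checked example of what dcg_at_k (method 0) and ndcg_at_k in the record above compute, restated with numpy so the snippet is self-contained:

import numpy as np

r = np.asfarray([3, 2, 3, 0, 1])                 # graded relevance, as retrieved
discounts = np.log2(np.arange(2, r.size + 1))    # positions 2..5
dcg = r[0] + np.sum(r[1:] / discounts)           # 3 + 2/1 + 3/1.585 + 0 + 1/2.322 ~= 7.3235
ideal = np.sort(r)[::-1]                         # [3, 3, 2, 1, 0]
idcg = ideal[0] + np.sum(ideal[1:] / discounts)  # ~= 7.7619
print(dcg / idcg)                                # ndcg_at_k(r, 5) ~= 0.9435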
0b90e2988d72868e586b38128130fb3bcab2cdf2
1,486
py
Python
training/components/data/data_split.py
anifort/vertex-mlops-airlines
5a213836070bcbe72419239f05dd15a42bdebd19
[ "MIT" ]
null
null
null
training/components/data/data_split.py
anifort/vertex-mlops-airlines
5a213836070bcbe72419239f05dd15a42bdebd19
[ "MIT" ]
null
null
null
training/components/data/data_split.py
anifort/vertex-mlops-airlines
5a213836070bcbe72419239f05dd15a42bdebd19
[ "MIT" ]
null
null
null
from kfp.v2.dsl import ( component, Input, Output, Dataset, Artifact, HTML, ) @component( packages_to_install=[ "dask[dataframe]==2021.12.0", "gcsfs==2021.11.1"] ) def data_split_comp( dataset: Input[Dataset], train_set: Output[Dataset], validation_set: Output[Dataset], test_set: Output[Dataset], train_data_size: float = 0.8, validation_data_size: float = 0.1, test_data_size: float = 0.1, seed: int = 0 ) -> None: if (train_data_size+validation_data_size+test_data_size!=1): raise ValueError('Train, Validation and Test data splits should add up to 1. Training:{}, Validation:{}, Test:{}'.format(train_data_size, validation_data_size, test_data_size)) import numpy as np import dask.dataframe as dd df = dd.read_csv(dataset.uri+"/data_*.csv") df = df.compute() np.random.seed(seed) perm = np.random.permutation(df.index) m = len(df.index) train_end = int(train_data_size * m) validate_end = int(validation_data_size * m) + train_end train = df.iloc[perm[:train_end]] validate = df.iloc[perm[train_end:validate_end]] test = df.iloc[perm[validate_end:]] train_set.uri = train_set.uri validation_set.uri = validation_set.uri test_set.uri = test_set.uri train.to_csv(train_set.uri, index=False) validate.to_csv(validation_set.uri, index=False) test.to_csv(test_set.uri, index=False)
28.037736
185
0.658816
from kfp.v2.dsl import ( component, Input, Output, Dataset, Artifact, HTML, ) @component( packages_to_install=[ "dask[dataframe]==2021.12.0", "gcsfs==2021.11.1"] ) def data_split_comp( dataset: Input[Dataset], train_set: Output[Dataset], validation_set: Output[Dataset], test_set: Output[Dataset], train_data_size: float = 0.8, validation_data_size: float = 0.1, test_data_size: float = 0.1, seed: int = 0 ) -> None: if (train_data_size+validation_data_size+test_data_size!=1): raise ValueError('Train, Validation and Test data splits should add up to 1. Training:{}, Validation:{}, Test:{}'.format(train_data_size, validation_data_size, test_data_size)) import numpy as np import dask.dataframe as dd df = dd.read_csv(dataset.uri+"/data_*.csv") df = df.compute() np.random.seed(seed) perm = np.random.permutation(df.index) m = len(df.index) train_end = int(train_data_size * m) validate_end = int(validation_data_size * m) + train_end train = df.iloc[perm[:train_end]] validate = df.iloc[perm[train_end:validate_end]] test = df.iloc[perm[validate_end:]] train_set.uri = train_set.uri validation_set.uri = validation_set.uri test_set.uri = test_set.uri train.to_csv(train_set.uri, index=False) validate.to_csv(validation_set.uri, index=False) test.to_csv(test_set.uri, index=False)
0
0
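The split boundaries in the component above come straight from the fractions: with m rows, train_end = int(train_data_size * m) and validate_end = int(validation_data_size * m) + train_end. A toy run of the same index arithmetic with m = 10 (no kfp needed):

import numpy as np
import pandas as pd

df = pd.DataFrame({"x": range(10)})
np.random.seed(0)
perm = np.random.permutation(df.index)
m = len(df.index)
train_end = int(0.8 * m)                   # 8
validate_end = int(0.1 * m) + train_end    # 9
train = df.iloc[perm[:train_end]]
validate = df.iloc[perm[train_end:validate_end]]
test = df.iloc[perm[validate_end:]]
print(len(train), len(validate), len(test))  # 8 1 1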
8f5bbfeb8bfe3c37315237b9e16a94e21b9c4579
723
py
Python
projects/rpg/src/ex_teams_generator2/config/config.py
japinol7/some-examples
a31ab64f861a7e37685582a9fb92ac58f86295d9
[ "MIT" ]
1
2020-07-25T23:28:56.000Z
2020-07-25T23:28:56.000Z
projects/rpg/src/ex_teams_generator2/config/config.py
japinol7/some_examples
a31ab64f861a7e37685582a9fb92ac58f86295d9
[ "MIT" ]
null
null
null
projects/rpg/src/ex_teams_generator2/config/config.py
japinol7/some_examples
a31ab64f861a7e37685582a9fb92ac58f86295d9
[ "MIT" ]
null
null
null
import logging GROUP_SEPARATOR = f"{'-' * 10}" N_TEAMS = 42 N_MEMBERS = 3 N_TEAMS_MAX = 50 N_MEMBERS_MAX = 15 BODY_TEAMS_KEY = 'teams' BODY_ERRORS_KEY = 'errors' ERROR_TAG = 'Error' ERROR_MAX_MSG = f"User input Error. Maximum {N_TEAMS_MAX} teams and {N_MEMBERS_MAX} members for team. " \ f"Values must be numbers!" ERROR_NOT_ENOUGH_MSG = 'Not enough Characters to generate this team' CALC_TEAM_MEMBER_MAX_TRIES = 100 ERROR_MAX_TRIES_MSG = f"Max tries exceeded while choosing a team member: {CALC_TEAM_MEMBER_MAX_TRIES}. Name: %s" LOGGER_FORMAT = '%(asctime)s %(levelname)s %(name)s: %(message)s' logging.basicConfig(format=LOGGER_FORMAT) log = logging.getLogger(__name__) log.setLevel(logging.DEBUG)
27.807692
112
0.748271
import logging GROUP_SEPARATOR = f"{'-' * 10}" N_TEAMS = 42 N_MEMBERS = 3 N_TEAMS_MAX = 50 N_MEMBERS_MAX = 15 BODY_TEAMS_KEY = 'teams' BODY_ERRORS_KEY = 'errors' ERROR_TAG = 'Error' ERROR_MAX_MSG = f"User input Error. Maximum {N_TEAMS_MAX} teams and {N_MEMBERS_MAX} members for team. " \ f"Values must be numbers!" ERROR_NOT_ENOUGH_MSG = 'Not enough Characters to generate this team' CALC_TEAM_MEMBER_MAX_TRIES = 100 ERROR_MAX_TRIES_MSG = f"Max tries exceeded while choosing a team member: {CALC_TEAM_MEMBER_MAX_TRIES}. Name: %s" LOGGER_FORMAT = '%(asctime)s %(levelname)s %(name)s: %(message)s' logging.basicConfig(format=LOGGER_FORMAT) log = logging.getLogger(__name__) log.setLevel(logging.DEBUG)
0
0
da08051bd47a3da223c1b5849a5d7df3dec70611
49,656
py
Python
pmagpy/tsunashawfuncs.py
yamamon75/PmagPy
fa5b189800a239683fc17c6b312cdfdd839a46c3
[ "BSD-3-Clause" ]
null
null
null
pmagpy/tsunashawfuncs.py
yamamon75/PmagPy
fa5b189800a239683fc17c6b312cdfdd839a46c3
[ "BSD-3-Clause" ]
null
null
null
pmagpy/tsunashawfuncs.py
yamamon75/PmagPy
fa5b189800a239683fc17c6b312cdfdd839a46c3
[ "BSD-3-Clause" ]
null
null
null
import matplotlib as mpl import matplotlib.pyplot as plt import multiprocessing as multi import numpy as np import os import pandas as pd import pmagpy import pmagpy.ipmag as ipmag import pmagpy.pmag as pmag import pmagpy.pmagplotlib as pmagplotlib import re import scipy.integrate as integrate import scipy.stats as stats import seaborn as sns import SPD.lib.leastsq_jacobian as lib_k import sys from datetime import datetime as dt from importlib import reload from multiprocessing import Pool from scipy.stats import linregress def API_param_combine(sid_df,afnrm,aftrm1,trm1_star_min,minN): # ## calculating first heating parameters ntrmRegs1=[] # used_df=sid_df[sid_df.treat>=trm1_star_min] used_df=used_df[['treat','nrm','trm1_star']] trm1_star_max=used_df['treat'].tolist()[len(used_df)-1] variables = [] for i in range(len(used_df)-minN+1): for j in range(len(used_df)-minN+1-i): variables = variables + [[used_df, afnrm,\ used_df['treat'].tolist()[i],\ used_df['treat'].tolist()[i+j+minN-1],'trm1_star','nrm']] p=Pool(multi.cpu_count()) ntrmRegs1=pd.DataFrame(p.map(wrapper_ltd_pars_mod, variables)) ntrmRegs1.columns=['n_n','slope_n','r_n','dAIC_n','frac_n',\ 'step_min_n','step_max','beta_n','krv_n','krvd_n','f_resid_n'] p.close() p.terminate() print('[calculated for', len(ntrmRegs1),\ 'step-combinations for 1st heating parameters',\ '(', trm1_star_min, '-', trm1_star_max, 'mT)]') #print(ntrmRegs1) # ## calculating second heating parameters trmRegs1=[] # interval serach from ZERO up to MAX trm2_star_min=sid_df['treat'].tolist()[0] used_df=sid_df[sid_df.treat>=trm2_star_min] used_df=used_df[['treat','trm1','trm2_star']] trm2_star_max=used_df['treat'].tolist()[len(used_df)-1] variables = [] for i in range(len(used_df)-minN+1): for j in range(len(used_df)-minN+1-i): variables = variables + [[used_df, aftrm1,\ used_df['treat'].tolist()[i],\ used_df['treat'].tolist()[i+j+minN-1],'trm2_star','trm1']] p=Pool(multi.cpu_count()) trmRegs1=pd.DataFrame(p.map(wrapper_ltd_pars_mod, variables)) trmRegs1.columns=['n_t','slope_t','r_t','dAIC_t','frac_t',\ 'step_min_t','step_max','beta_t','krv_t','krvd_t','f_resid_t'] p.close() p.terminate() print('[calculated for', len(trmRegs1),\ 'step-combinations for 2nd heating parameters',\ '(', trm2_star_min, '-', trm2_star_max, 'mT)]') #print(trmRegs1) # print('[merge the combinations for H_min_n >= H_min_t with common H_max]') combinedRegs0=[] combinedRegs0=pd.merge(ntrmRegs1, trmRegs1, on='step_max', how='outer') combinedRegs1=[] combinedRegs1=combinedRegs0[combinedRegs0.step_min_n>=combinedRegs0.step_min_t] print(' ', len(combinedRegs0), ' cominbnations --> ',\ len(combinedRegs1), ' cominbnations') #print(combinedRegs1) # ## calculating dAPI(difference between resultantAPI/expectedAPI) #aftrm10=sid_data[sid_data.description.str.contains('TRM10')] # set lab_field #if (len(aftrm10)>0): lab_field=aftrm10.treat_dc_field.tolist()[0] #combinedRegs1['dAPI']=abs(1 - combinedRegs1['slope_n'] * lab_field / True_API) #print(combinedRegs1) #screened=combinedRegs1 # return combinedRegs1 def clean_duplicates(df,type): clean_df=df[ ((df['step']==0) &(df.XRM==type) )==False] duplicate=df[ ((df['step']==0) &(df.XRM==type) )==True].tail(1) df=pd.concat((clean_df,duplicate)) df.sort_values(by='number',inplace=True) return df def convert_ts_dspin(infile, citations, instrument, ARM_DC_field): # info=pd.read_csv(infile,nrows=4,header=None)[0] weightline=info.loc[info.str.contains('weight')==True] #weight_gm=float(weightline.str.split().values[-1][-1][:-1]) 
    weight_gm=float(re.findall("\d+\.\d+", str(weightline))[0])
    IDline=info.loc[info.str.contains('\$')==True].str.split().values[-1]
    specimen,azimuth,dip,lab_field_uT=IDline[1],float(IDline[2]),float(IDline[3]),float(IDline[4])
    site=specimen.split('-')[0]
    sample=site+'-'+specimen.split('-')[1]
    #
    columns=['XRM','step','magn_mass','dir_inc','dir_dec','Smag']
    lab_field=lab_field_uT*1e-6 # convert from uT to T
    data=pd.read_csv(infile,delim_whitespace=True,header=None,skiprows=4)
    data.columns=columns
    data['dir_dec']=data['dir_dec']%360
    data=data[data.XRM.str.contains('#')==False]
    #
    # set some defaults
    data['description']=""
    data['specimen']=specimen
    data['sample']=sample # assume specimen=sample
    data['site']=site
    data['weight'],weight=weight_gm*1e-3,weight_gm*1e-3 # use weight in kg
    data['azimuth']=azimuth
    data['dip']=dip
    data['treat_temp']=273.
    data['treat_ac_field']=data['step']*1e-3 # convert mT to T
    data['treat_dc_field']=0
    data['treat_dc_field_phi']=""
    data['treat_dc_field_theta']=""
    data['meas_temp']=273.
    data['citations']=citations
    data['software_packages'],version=pmag.get_version(),pmag.get_version()
    data['instrument_codes']=instrument
    data['standard']='u' # set to unknown
    data['quality']='g' # set to good as default
    methstring='LP-PI-TRM:LP-PI-ALT-AFARM:LP-LT'
    data['method_codes']=methstring
    #
    data=data[((data['step']!=0) & (data.XRM=='ARM00'))==False] # delete all but first ARM00
    data=data[((data['step']!=0) & (data.XRM=='ARM10'))==False] # delete all but first ARM10
    data=data[((data['step']!=0) & (data.XRM=='ARM20'))==False] # delete all but first ARM20
    ## delete the extra step 0 steps for ARM0, ARM1 & ARM2
    data['number'] = range(len(data))
    #
    data=clean_duplicates(data,'ARM0')
    data=clean_duplicates(data,'ARM1')
    data=clean_duplicates(data,'ARM2')
    data=clean_duplicates(data,'TRM10')
    # add descriptions for plotting
    data.loc[(data.XRM.str.contains('NRM')==True),'description']='NRM'
    data.loc[(data.XRM.str.contains('NRM0')==True),'description']='NRM0'
    data.loc[(data.XRM.str.contains('ARM0')==True),'description']='ARM0'
    data.loc[(data.XRM.str.contains('ARM00')==True),'description']='ARM00'
    data.loc[(data.XRM.str.contains('TRM1')==True),'description']='TRM1'
    data.loc[(data.XRM.str.contains('TRM10')==True),'description']='TRM10'
    data.loc[(data.XRM.str.contains('ARM1')==True),'description']='ARM1'
    data.loc[(data.XRM.str.contains('ARM10')==True),'description']='ARM10'
    data.loc[(data.XRM.str.contains('TRM2')==True),'description']='TRM2'
    data.loc[(data.XRM.str.contains('TRM20')==True),'description']='TRM20'
    data.loc[(data.XRM.str.contains('ARM2')==True),'description']='ARM2'
    data.loc[(data.XRM.str.contains('ARM20')==True),'description']='ARM20'
    #
    ARM0_step=data[ (data.XRM.str.contains('ARM0')==True)].head(1)
    if (len(ARM0_step)>0):
        ARM0_phi=ARM0_step['dir_dec'].values[0]
        ARM0_theta=ARM0_step['dir_inc'].values[0]
    #
    TRM1_step=data[ (data.XRM.str.contains('TRM1')==True)].head(1)
    if (len(TRM1_step)>0):
        TRM1_phi=TRM1_step['dir_dec'].values[0]
        TRM1_theta=TRM1_step['dir_inc'].values[0]
    #
    ARM1_step=data[ (data.XRM.str.contains('ARM1')==True)].head(1)
    if (len(ARM1_step)>0):
        ARM1_phi=ARM1_step['dir_dec'].values[0]
        ARM1_theta=ARM1_step['dir_inc'].values[0]
    #
    TRM2_step=data[ (data.XRM.str.contains('TRM2')==True)].head(1)
    if (len(TRM2_step)>0):
        TRM2_phi=TRM2_step['dir_dec'].values[0]
        TRM2_theta=TRM2_step['dir_inc'].values[0]
    #
    ARM2_step=data[ (data.XRM.str.contains('ARM2')==True)].head(1)
    if (len(ARM2_step)>0):
        ARM2_phi=ARM2_step['dir_dec'].values[0]
        ARM2_theta=ARM2_step['dir_inc'].values[0]
    #
    # add in method codes
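    # (MagIC method-code vocabulary, as used below: LT-AF-Z = AF demagnetization in
    # zero field, LT-AF-I = AF with a DC bias field, i.e. ARM acquisition,
    # LT-T-I = TRM acquisition, LT-LT-Z = low-temperature treatment in zero field,
    # LT-NO = untreated NRM)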
    # NRM LTD demag
    data.loc[(data.XRM.str.contains('NRM0')==True),'method_codes']=\
        'LT-NO:LP-DIR-AF:'+methstring
    data.loc[((data['step']==0) &(data.XRM=='NRM')),'method_codes']=\
        'LT-LT-Z:LP-DIR-AF:'+methstring
    data.loc[((data['step']!=0) &(data.XRM=='NRM')),'method_codes']=\
        'LT-AF-Z:LP-DIR-AF:LT-AF-Z-TUMB:'+methstring
    # ARM0 LTD DEMAG
    data.loc[(data.XRM.str.contains('ARM00')==True),'method_codes']=\
        'LT-AF-I:LT-NRM-PAR:LP-ARM-AFD:'+methstring
    data.loc[((data['step']==0) &(data.XRM=='ARM0')),'method_codes']=\
        'LT-AF-I:LT-NRM-PAR:LT-LT-Z:LP-ARM-AFD:'+methstring
    data.loc[((data['step']!=0) &(data.XRM=='ARM0')),'method_codes']=\
        'LT-AF-Z:LP-ARM-AFD:LT-AF-Z-TUMB:'+methstring
    # TRM1 LTD DEMAG
    data.loc[(data.XRM.str.contains('TRM10')==True),'method_codes']=\
        'LT-T-I:LP-TRM-AFD:'+methstring
    data.loc[((data['step']==0) &(data.XRM=='TRM1')),'method_codes']=\
        'LT-LT-Z:LP-TRM-AFD:'+methstring
    data.loc[((data['step']!=0) &(data.XRM=='TRM1')),'method_codes']=\
        'LT-AF-Z:LP-TRM-AFD:LT-AF-Z-TUMB:'+methstring
    # ARM1 LTD DEMAG
    data.loc[(data.XRM.str.contains('ARM10')==True),'method_codes']=\
        'LT-AF-I:LT-TRM-PAR:LP-ARM-AFD:'+methstring
    data.loc[((data['step']==0) &(data.XRM=='ARM1')),'method_codes']=\
        'LT-AF-I:LT-TRM-PAR:LT-LT-Z:LP-ARM-AFD:'+methstring
    data.loc[((data['step']!=0) &(data.XRM=='ARM1')),'method_codes']=\
        'LT-AF-Z:LP-ARM-AFD:LT-AF-Z-TUMB:'+methstring
    # TRM2 LTD DEMAG
    data.loc[(data.XRM.str.contains('TRM20')==True),'method_codes']=\
        'LT-T-I:LP-TRM-AFD:'+methstring
    data.loc[((data['step']==0) &(data.XRM=='TRM2')),'method_codes']=\
        'LT-LT-Z:LP-TRM-AFD:'+methstring
    data.loc[((data['step']!=0) &(data.XRM=='TRM2')),'method_codes']=\
        'LT-AF-Z:LP-TRM-AFD:LT-AF-Z-TUMB:'+methstring
    # ARM2 LTD DEMAG
    data.loc[(data.XRM.str.contains('ARM20')==True),'method_codes']=\
        'LT-AF-I:LT-TRM-PAR:LP-ARM-AFD:'+methstring
    data.loc[((data['step']==0) &(data.XRM=='ARM2')),'method_codes']=\
        'LT-AF-I:LT-TRM-PAR:LT-LT-Z:LP-ARM-AFD:'+methstring
    data.loc[((data['step']!=0) &(data.XRM=='ARM2')),'method_codes']=\
        'LT-AF-Z:LP-ARM-AFD:LT-AF-Z-TUMB:'+methstring
    #
    data['experiment'],experiment=specimen+':'+methstring,specimen+':'+methstring
    #
    # reset lab field directions to TRM direction for TRM steps
    data.loc[(data.method_codes.str.contains('LT-T-I')==True),'treat_dc_field']=lab_field
    if (len(TRM1_step)>0):
        data.loc[( (data.method_codes.str.contains('LT-T-I')==True)&\
            (data.description.str.contains('TRM1'))),'treat_dc_field_phi']=TRM1_phi
        data.loc[((data.method_codes.str.contains('LT-T-I')==True)&\
            (data.description.str.contains('TRM1'))),'treat_dc_field_theta']=TRM1_theta
    if (len(TRM2_step)>0):
        data.loc[( (data.method_codes.str.contains('LT-T-I')==True)&\
            (data.description.str.contains('TRM2'))),'treat_dc_field_phi']=TRM2_phi
        data.loc[((data.method_codes.str.contains('LT-T-I')==True)&\
            (data.description.str.contains('TRM2'))),'treat_dc_field_theta']=TRM2_theta
    #
    # reset lab field directions to ARM direction for ARM steps
    data.loc[(data.method_codes.str.contains('LT-AF-I')==True),'treat_dc_field']=ARM_DC_field
    if (len(ARM0_step)>0):
        data.loc[( (data.method_codes.str.contains('LT-AF-I')==True)&\
            (data.description.str.contains('ARM0'))),'treat_dc_field_phi']=ARM0_phi
        data.loc[((data.method_codes.str.contains('LT-AF-I')==True)&\
            (data.description.str.contains('ARM0'))),'treat_dc_field_theta']=ARM0_theta
    #
    if (len(ARM1_step)>0):
        data.loc[( (data.method_codes.str.contains('LT-AF-I')==True)&\
            (data.description.str.contains('ARM1'))),'treat_dc_field_phi']=ARM1_phi
        data.loc[((data.method_codes.str.contains('LT-AF-I')==True)&\
            (data.description.str.contains('ARM1'))),'treat_dc_field_theta']=ARM1_theta
    #
    if (len(ARM2_step)>0):
        data.loc[( (data.method_codes.str.contains('LT-AF-I')==True)&\
            (data.description.str.contains('ARM2'))),'treat_dc_field_phi']=ARM2_phi
        data.loc[((data.method_codes.str.contains('LT-AF-I')==True)&\
            (data.description.str.contains('ARM2'))),'treat_dc_field_theta']=ARM2_theta
    #
    # temperature of liquid nitrogen
    data.loc[(data.method_codes.str.contains('LT-LT-Z')==True),'treat_temp']=77
    #
    meas_data=data[['specimen','magn_mass','dir_dec','dir_inc','treat_temp','treat_ac_field',\
        'treat_dc_field','treat_dc_field_phi','treat_dc_field_theta','meas_temp',\
        'citations','number','experiment','method_codes','software_packages',\
        'instrument_codes','standard','quality','description']]
    meas_data['magn_moment']=meas_data['magn_mass']*weight
    #
    meas_data['sequence']=meas_data.index
    spec_data=pd.DataFrame([{'specimen':specimen,'sample':sample,'weight':weight,\
        'azimuth':0,'dip':0,'experiments':experiment,'result_quality':'g',\
        'method_codes':methstring,'citations':citations,'software_packages':version}])
    #
    spec_data['result_type']='i'
    spec_data['result_quality']='g'
    spec_data['description']=" "
    if azimuth==0 and dip==0:
        spec_data['dir_tilt_correction']=-1
    else:
        spec_data['dir_tilt_correction']=0
    samp_data=spec_data[['sample']]
    samp_data['site']=site
    samp_data['azimuth']=0
    samp_data['dip']=0
    samp_data['orientation_quality']='g'
    samp_data['description']=\
        'measurement directions corrected with: azimuth='+str(azimuth)+' dip='+str(dip)
    #
    # write out the data file
    return meas_data, spec_data, samp_data

def find_best_API_portion_r(combinedRegs1,minFrac,minR,minSlopeT,maxSlopeT):
    """
    Finds the best portion for NRM-TRM1* and TRM1-TRM2* plots
    by r criteria of Yamamoto+2003
    (1) calculate API statistics for all possible coercivity intervals
    (2) discard the statistics not satisfying the usual selection criteria
        (when applicable)
    omitted - (3) sort the statistics by dAPI (rel. departure from the
        expected API), and select the best 10 statistics
    (4) sort the statistics by frac_n, and select the best one

    Curvature (k) calculation is made by the code for Arai plot by Lisa.
    This is done for inverted-X (e.g. -TRM1, -ARM1, ..) and original-Y
    (e.g. NRM, ARM0, ..). The inverted-X is offset (positive) to zero
    as a minimum.
    revised 2021/09/06

    Parameters
    __________
    combinedRegs1 : combined API parameters
    minFrac,minR,minSlopeT,maxSlopeT : thresholds for the r criteria

    Returns
    ______
    trm1_star_min
    trm1_star_max
    trm2_star_min
    trm2_star_max
    """
    print('[criteria, 2nd heating]')
    #
    screened=combinedRegs1[combinedRegs1.frac_t>=minFrac]
    if (len(screened)>0):
        print(' Frac_t >=', minFrac, ': ', len(screened),'step-combinations')
    else:
        print(' Frac_t >=', minFrac, ': no step-combinations satisfied')
        screened=combinedRegs1
    #
    screened2=screened[screened.r_t>=minR]
    if (len(screened2)>0):
        print(' r_t >=', minR, ': ', len(screened2),'step-combinations')
        screened=screened2
    else:
        print(' r_t >=', minR, ': no step-combinations satisfied')
    #
    screened3=screened[(screened.slope_t>=minSlopeT)\
        &(screened.slope_t<=maxSlopeT)]
    if (len(screened3)>0):
        print(' ', minSlopeT, '<= slope_t <=', maxSlopeT, \
            ': ', len(screened3),'step-combinations')
        screened=screened3
    else:
        print(' ', minSlopeT, '<= slope_t <=', maxSlopeT, \
            ': no step-combinations satisfied')
    #
    print('[criteria, 1st heating]')
    #
    screened4=screened[screened.frac_n>=minFrac]
    if (len(screened4)>0):
        print(' Frac_n >=', minFrac, ': ', len(screened4),'step-combinations')
        screened=screened4
    else:
        print(' Frac_n >=', minFrac, ': no step-combinations satisfied')
    #
    screened5=screened[screened.r_n>=minR]
    if (len(screened5)>0):
        print(' r_n >=', minR, ': ', len(screened5),'step-combinations')
        screened=screened5
    else:
        print(' r_n >=', minR, ': no step-combinations satisfied')
    ## sort by dAPI, then select top 10
    #print('[sort by dAPI and select the top 10 data]')
    #screened=screened.sort_values('dAPI')
    #screened=screened.iloc[:10]
    #
    # sort by frac_n, then select the best
    print('[sort by frac_n and select the best step-combination]')
    screened=screened.sort_values('frac_n', ascending=False)
    screened_best_fn=screened.iloc[:1]
    #print(screened)
    trm2_star_min=screened_best_fn['step_min_t'].iloc[0]
    trm2_star_max=screened_best_fn['step_max'].iloc[0]
    trm1_star_min=screened_best_fn['step_min_n'].iloc[0]
    trm1_star_max=screened_best_fn['step_max'].iloc[0]
    #
    return trm1_star_min, trm1_star_max, trm2_star_min, trm2_star_max, screened

def find_best_API_portion_k(combinedRegs1,maxBeta,maxFresid,maxKrv):
    """
    Finds the best portion for NRM-TRM1* and TRM1-TRM2* plots
    by k' criteria of Lloyd+2021
    (1) calculate API statistics for all possible coercivity intervals
    (2) discard the statistics not satisfying the Beta criterion (0.1)
        and the k' criterion (0.2)
    omitted - (3) sort the statistics by dAPI (rel. departure from the
        expected API), and select the best 10 statistics
    (4) sort the statistics by frac_n, and select the best one

    Parameters
    __________
    combinedRegs1 : combined API parameters
    maxBeta,maxFresid,maxKrv : thresholds for the k' criteria

    Returns
    ______
    trm1_star_min
    trm1_star_max
    trm2_star_min
    trm2_star_max
    """
    print('[criteria, 2nd heating]')
    screened=combinedRegs1
    #
    #screened=combinedRegs1[combinedRegs1.frac_t>=minFrac]
    #if (len(screened)>0):
    #    print(' Frac_t >=', minFrac, ': ', len(screened),'step-combinations')
    #else:
    #    print(' Frac_t >=', minFrac, ': no step-combinations satisfied')
    #    screened=combinedRegs1
    ##
    #screened2=screened[screened.krvd_t<=maxKrv]
    #if (len(screened2)>0):
    #    print(' k\' <=', maxKrv, ': ', len(screened2),'step-combinations')
    #    screened=screened2
    #else:
    #    print(' k\' <=', maxKrv, ': no step-combinations satisfied')
    ##
    #screened3=screened[(screened.slope_t>=minSlopeT)\
    #    &(screened.slope_t<=maxSlopeT)]
    #if (len(screened3)>0):
    #    print(' ', minSlopeT, '<= slope_t <=', maxSlopeT, \
    #        ': ', len(screened3),'step-combinations')
    #    screened=screened3
    #else:
    #    print(' ', minSlopeT, '<= slope_t <=', maxSlopeT, \
    #        ': no step-combinations satisfied')
    ##
    print('[criteria, 1st heating]')
    #
    #screened4=screened[screened.frac_n>=minFrac]
    #if (len(screened4)>0):
    #    print(' Frac_n >=', minFrac, ': ', len(screened4),'step-combinations')
    #    screened=screened4
    #else:
    #    print(' Frac_n >=', minFrac, ': no step-combinations satisfied')
    #
    screened5=screened[screened.beta_n<=maxBeta]
    if (len(screened5)>0):
        print(' beta <=', maxBeta, ': ', len(screened5),'step-combinations')
        screened=screened5
    else:
        print(' beta <=', maxBeta, ': no step-combinations satisfied')
    #
    screened6=screened[screened.f_resid_n<=maxFresid]
    if (len(screened6)>0):
        print(' f_resid <=', maxFresid, ': ', len(screened6),'step-combinations')
        screened=screened6
    else:
        print(' f_resid <=', maxFresid, ': no step-combinations satisfied')
    #
    screened7=screened[abs(screened.krvd_n)<=maxKrv]
    if (len(screened7)>0):
        print(' abs_k\' <=', maxKrv, ': ', len(screened7),'step-combinations')
        screened=screened7
    else:
        print(' abs_k\' <=', maxKrv, ': no step-combinations satisfied')
    ## sort by dAPI, then select top 10
    #print('[sort by dAPI and select the top 10 data]')
    #screened=screened.sort_values('dAPI')
    #screened=screened.iloc[:10]
    # sort by frac_n, then select the best
    print('[sort by frac_n and select the best step-combination]')
    screened=screened.sort_values('frac_n', ascending=False)
    screened_fn=screened.iloc[:1]
    #print(screened)
    trm2_star_min=screened_fn['step_min_t'].iloc[0]
    trm2_star_max=screened_fn['step_max'].iloc[0]
    trm1_star_min=screened_fn['step_min_n'].iloc[0]
    trm1_star_max=screened_fn['step_max'].iloc[0]
    #
    return trm1_star_min, trm1_star_max, trm2_star_min, trm2_star_max, screened

def find_mdf(df):
    """
    Finds the median destructive field for AF demag data

    Parameters
    __________
    df : dataframe of measurements

    Returns
    ______
    mdf : median destructive field
    """
    mdf_df=df[df.meas_norm<=0.5]
    mdf_high=mdf_df.treat_ac_field_mT.values[0]
    mdf_df=df[df.meas_norm>=0.5]
    mdf_low=mdf_df.treat_ac_field_mT.values[-1]
    mdf=int(0.5*(mdf_high+mdf_low))
    return mdf

def ltd_pars(df1,afxrm,step_min,step_max,xkey,ykey):
    #
    used1=df1[(df1.treat>=step_min)&(df1.treat<=step_max)]
    n=len(used1)
    slope, b, r, p, stderr =\
        linregress(used1[xkey].values.astype('float'),\
            used1[ykey].values.astype('float'))
    coeffs1=np.polyfit(used1[xkey].values.astype('float'),used1[ykey].values.astype('float'),1)
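    # degree-1 and degree-2 polynomial fits are compared below with a simple AIC
    # difference, dAIC = n*(ln(chi2_linear) - ln(chi2_quadratic)) - 2; a large
    # positive dAIC means the quadratic fit is clearly better, i.e. the selected
    # segment is curved rather than linear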
    coeffs2=np.polyfit(used1[xkey].values.astype('float'),used1[ykey].values.astype('float'),2)
    #
    beta=stderr/abs(slope)
    #
    krv=lib_k.AraiCurvature(x=df1[xkey],y=df1[ykey])[0]
    krv_dash=lib_k.AraiCurvature(x=used1[xkey].values.astype('float'),\
        y=used1[ykey].values.astype('float'))[0]
    #
    linY=np.polyval(coeffs1,used1[xkey].values.astype('float'))
    curveY=np.polyval(coeffs2,used1[xkey].values.astype('float'))
    chi1, chi2 = (used1[ykey]-linY)**2, (used1[ykey]-curveY)**2
    chi1sum, chi2sum = chi1.sum(), chi2.sum()
    dAIC = n * (np.log(chi1sum) - np.log(chi2sum)) - 2
    #
    used2=afxrm[(afxrm.treat_ac_field_mT>=step_min)&(afxrm.treat_ac_field_mT<=step_max)]
    tblock=used2[['dir_dec','dir_inc','meas_norm']]
    tall=afxrm[['dir_dec','dir_inc','meas_norm']]
    XYZ, XYZall = pmag.dir2cart(tblock).transpose(), pmag.dir2cart(tall).transpose()
    Rused, Rall = vds(XYZ), vds(XYZall)
    frac=Rused/Rall
    #
    y_int = coeffs1[1]
    y_prime = []
    for i in range(0, len(used1[ykey])):
        y_prime.append(0.5 * (used1[ykey].values.astype('float')[i] \
            + slope * used1[xkey].values.astype('float')[i] + y_int))
    #print(y_prime)
    delta_y_prime = abs(max(y_prime) - min(y_prime))
    f_resid = abs(y_int) / delta_y_prime
    #print('f_resid=',f_resid)
    #
    return n,slope,b,r,stderr,coeffs1,coeffs2,dAIC,frac,beta,krv,krv_dash,f_resid,used1

def ltd_pars_mod(df1,afxrm,step_min,step_max,xkey,ykey):
    #
    n, slope, b, r, stderr, coeffs1, coeffs2, dAIC, frac, beta, krv, krv_dash, f_resid, used1 =\
        ltd_pars(df1,afxrm,step_min,step_max,xkey,ykey)
    #
    return n,slope,r,dAIC,frac,step_min,step_max,beta,krv,krv_dash,f_resid

def opt_interval_first_heating(zijd_min, sid_df, afnrm, minN, minFrac, minR):
    #
    ntrmRegs1=[]
    trm1_star_min=zijd_min
    used_df=sid_df[sid_df.treat>=trm1_star_min]
    used_df=used_df[['treat','nrm','trm1_star']]
    trm1_star_max=used_df['treat'].tolist()[len(used_df)-1]
    variables = []
    for i in range(len(used_df)-minN+1):
        for j in range(len(used_df)-minN+1-i):
            variables = variables + \
                [[used_df, afnrm, used_df['treat'].tolist()[i],\
                    used_df['treat'].tolist()[i+j+minN-1],'trm1_star','nrm']]
    p=Pool(multi.cpu_count())
    ntrmRegs1=pd.DataFrame(p.map(wrapper_ltd_pars_mod, variables))
    ntrmRegs1.columns=['n_n','slope_n','r_n','dAIC_n','frac_n',\
        'step_min','step_max','beta_n','krv_n','krvd_n','f_resid_n']
    p.close()
    p.terminate()
    #print(ntrmRegs1)
    screened=ntrmRegs1
    screened2=ntrmRegs1[ntrmRegs1.frac_n>=minFrac]
    if (len(screened2)>0):
        screened=screened2
    screened3=screened[screened.r_n>=minR]
    if (len(screened3)>0):
        screened=screened3
    screened=screened.sort_values('dAIC_n')
    screened=screened.iloc[:10]
    #print(screened)
    # decide optimum interval
    trm1_star_min = screened.loc[screened.frac_n.idxmax(), "step_min"]
    trm1_star_max = screened.loc[screened.frac_n.idxmax(), "step_max"]
    print('opt interval NRM-TRM1*: %5.1f'%(trm1_star_min) \
        + ' - %5.1f'%(trm1_star_max) + ' mT')
    #
    return trm1_star_min, trm1_star_max

def opt_interval_second_heating(sid_df, aftrm1, minN, minFrac, minR, minSlopeT, maxSlopeT):
    #
    trmRegs1=[]
    # interval search from ZERO up to MAX
    trm2_star_min=sid_df['treat'].tolist()[0]
    used_df=sid_df[sid_df.treat>=trm2_star_min]
    used_df=used_df[['treat','trm1','trm2_star']]
    trm2_star_max=used_df['treat'].tolist()[len(used_df)-1]
    variables = []
    for i in range(len(used_df)-minN+1):
        for j in range(len(used_df)-minN+1-i):
            variables = variables + [[used_df, aftrm1,\
                used_df['treat'].tolist()[i],\
                used_df['treat'].tolist()[i+j+minN-1],'trm2_star','trm1']]
    p=Pool(multi.cpu_count())
    trmRegs1=pd.DataFrame(p.map(wrapper_ltd_pars_mod, variables))
    trmRegs1.columns=['n_t','slope_t','r_t','dAIC_t','frac_t',\
        'step_min','step_max','beta_t','krv_t','krvd_t','f_resid_t']
    p.close()
    p.terminate()
    #print(trmRegs1)
    screened=trmRegs1[trmRegs1.frac_t>=minFrac]
    screened2=screened[screened.r_t>=minR]
    if (len(screened2)>0):
        screened=screened2
    screened3=screened[(screened.slope_t>=minSlopeT)&(screened.slope_t<=maxSlopeT)]
    if (len(screened3)>0):
        screened=screened3
    screened=screened.sort_values('dAIC_t')
    screened=screened.iloc[:10]
    #print(screened)
    # decide optimum interval
    trm2_star_min = screened.loc[screened.frac_t.idxmax(), "step_min"]
    trm2_star_max = screened.loc[screened.frac_t.idxmax(), "step_max"]
    print('opt interval TRM1-TRM2*: %5.1f'%(trm2_star_min) \
        + ' - %5.1f'%(trm2_star_max) + ' mT')
    #
    return trm2_star_min, trm2_star_max

def opt_interval_zij(afnrm, minN):
    #
    # optimum interval search from ZERO up to MAX
    variables = []
    for i in range(len(afnrm)-minN+1):
        for j in range(len(afnrm)-minN+1-i):
            variables = variables + [[afnrm,\
                afnrm['treat_ac_field_mT'].tolist()[i],\
                afnrm['treat_ac_field_mT'].tolist()[i+j+minN-1]]]
    p=Pool(multi.cpu_count())
    zijPCArsts1=pd.DataFrame(p.map(wrapper_zijd_PCA_calc, variables))
    zijPCArsts1.columns=['step_min','step_max','mad','dang','spec_n']
    zijPCArsts1['mad+dang']=zijPCArsts1['mad']+zijPCArsts1['dang']
    p.close()
    p.terminate()
    #print(zijPCArsts1)
    screened=zijPCArsts1.sort_values('mad+dang')
    screened=screened.iloc[:10]
    #print(screened)
    # decide optimum interval
    step_min_mad_min = screened.loc[screened['mad'].idxmin(), "step_min"]
    step_max_mad_min = screened.loc[screened['mad'].idxmin(), "step_max"]
    step_min_dang_min = screened.loc[screened['dang'].idxmin(), "step_min"]
    step_max_dang_min = screened.loc[screened['dang'].idxmin(), "step_max"]
    step_min_opt_zij = step_min_mad_min \
        if step_min_mad_min < step_min_dang_min else step_min_dang_min
    step_max_opt_zij = step_max_mad_min \
        if step_max_mad_min > step_max_dang_min else step_max_dang_min
    print('opt interval Zijderveld: %5.1f'%(step_min_opt_zij)+ ' - %5.1f'%(step_max_opt_zij) + ' mT')
    #
    return step_min_opt_zij, step_max_opt_zij

def plot_af_xrm(sid,sdf,ax,df,rem_type):
    #
    df=df.reset_index()
    #
    if 'ARM' in rem_type:
        xrm0=df.magn_mass_diff.tolist()[0]
        df0=sdf[sdf.description.str.contains(rem_type+'0')]
        df0=df0.tail(1)
        df0['meas_norm']=df0['magn_mass_diff']/xrm0
        dflt=df0[df0.method_codes.str.contains('LT-LT-Z')==True]
    else:
        xrm0=df.magn_mass.tolist()[0]
        df0=sdf[sdf.description.str.contains(rem_type+'0')]
        df0=df0.tail(1)
        df0['meas_norm']=df0['magn_mass']/xrm0
        dflt=df[df.method_codes.str.contains('LT-LT-Z')==True]
    #print(df0)
    #print(dflt)
    #df0=df0.reset_index()
    #dflt=dflt.reset_index()
    #
    afdmax=df['treat_ac_field_mT'].max()
    mdf=find_mdf(df)
    #
    # plot definitions
    ax.set_title(sid+'\n '+str(rem_type)+'$_0$='+'%8.2e'%(xrm0)\
        +' Am$^2$/kg ; MDF ~ '+str(mdf)+' mT')
    ax.set_xlabel('alternating field (mT)')
    ax.set_ylabel(str(rem_type)+'/'+str(rem_type)+'$_0$')
    ax.set_xlim(-10,200)
    ymax=df.meas_norm.max()
    if df0['meas_norm'].max() > 1.0:
        ax.set_ylim(-0.05,df0['meas_norm'].max()*1.1)
    else:
        ax.set_ylim(-0.05,ymax*1.1)
    # dotted line for each 0.5 interval for Y
    for i in range(int(ymax//0.5)+1):
        ax.axhline(0.5*i,linestyle='dotted')
    #
    # plot main data
    ax.plot(df['treat_ac_field_mT'],df['meas_norm'],'ro')
    ax.plot(df['treat_ac_field_mT'],df['meas_norm'],'r-')
    # put on the last AF step magnetization
    ax.text(df['treat_ac_field_mT'].values[-1]+.05,\
        df['meas_norm'].values[-1]+.02,'%5.3f'%(df['meas_norm'].values[-1]))
    # plot the data at af=0
    if (len(df0)>0):
        ax.plot(df0['treat_ac_field_mT'],df0['meas_norm'],'wo',markeredgecolor='black')
        ax.text(df0['treat_ac_field_mT']+.075,df0['meas_norm']+.02,\
            '%5.3f'%(df0['meas_norm']))
    if (len(dflt)>0):
        ax.plot(dflt['treat_ac_field_mT'],dflt['meas_norm'],'bo')
    #
    # normalized ratios at af=0 and afmax
    ratio_0 = 0
    if (len(df0)>0):
        ratio_0 = df0['meas_norm'].values[0]
    #
    ratio_afmax = 0
    if (len(df)>0):
        ratio_afmax = df['meas_norm'].values[-1]
    #
    return afdmax, mdf, xrm0*1e6, ratio_0, ratio_afmax

def plot_ntrm_arm(sid,ax,df,afxrm,step_min,step_max,xkey,ykey):
    #
    fac=1e6
    unit=' $\mu$Am$^2$/kg'
    #
    #fac=1e3
    #unit=' mAm$^2$/kg'
    #
    n,slope,b,r,stderr,coeffs1,coeffs2,dAIC,frac,beta,krv,krv_dash,f_resid,selected_df =\
        ltd_pars(df,afxrm,step_min,step_max,xkey,ykey)
    #
    xymax=1.1*fac*np.array([[df[ykey].max(),df[xkey].max()]]).max()
    tick=[float('{:.1e}'.format(xymax*(i+1)/4)) for i in range(4)]
    if (slope<1.5):
        [xl, yl1, yl2, yl3, yl4]=[0.10, 0.90, 0.85, 0.80, 0.75]
    if (slope>=1.5):
        [xl, yl1, yl2, yl3, yl4]=[0.50, 0.20, 0.15, 0.10, 0.05]
    #
    linY=np.polyval(coeffs1,selected_df[xkey].values.astype('float'))
    #
    ax.set_title(sid)
    ax.set_xlabel(xkey.upper()+unit)
    ax.set_ylabel(ykey.upper()+unit)
    ax.set_xlim(0,xymax)
    ax.set_ylim(0,xymax)
    ax.set_xticks(tick)
    ax.set_yticks(tick)
    #
    ax.plot(df[xkey]*fac,df[ykey]*fac,'wo',markeredgecolor='black')
    ax.plot(selected_df[xkey]*fac,selected_df[ykey]*fac,'ko')
    ax.plot(fac*selected_df[xkey].values.astype('float'),fac*linY,'r-')
    ax.text(xl, yl1,'slope= %5.3f'%(slope)+'$\pm$%5.3f'%(stderr),\
        horizontalalignment='left', verticalalignment='center',\
        transform = ax.transAxes)
    ax.text(xl, yl2,'r= %5.3f'%(r)+', N = '+str(n),\
        horizontalalignment='left', verticalalignment='center',\
        transform = ax.transAxes)
    ax.text(xl, yl3,'k\'= %5.3f'%(krv_dash)+' (k= %5.3f'%(krv)+')',\
        horizontalalignment='left', verticalalignment='center',\
        transform = ax.transAxes)
    ax.text(xl, yl4,'('+str(int(step_min))+'-'+str(int(step_max))+' mT)',\
        horizontalalignment='left', verticalalignment='center',\
        transform = ax.transAxes)
    #
    return slope,r,n,krv,krv_dash

def plot_pint_main(sid,ax,df1,afxrm,xkey,ykey,step_min,step_max,aftrm1,aftrm2,spec_prv_df,criteria,minR,minFrac,minSlopeT,maxSlopeT,maxBeta,maxFresid,maxKrv,lab_field):
    #
    tick_div=4
    #
    fac=1e6
    unit=' $\mu$Am$^2$/kg'
    #
    #fac=1e3
    #unit=' mAm$^2$/kg'
    #
    n,slope,b,r,stderr,coeffs1,coeffs2,dAIC,frac,beta,krv,krv_dash,f_resid,selected_df =\
        ltd_pars(df1,afxrm,step_min,step_max,xkey,ykey)
    #
    xymax=1.1*fac*np.array([[df1[xkey].max(),df1[ykey].max()]]).max()
    tick=[float('{:.1e}'.format(xymax*(i+1)/tick_div)) for i in range(tick_div)]
    if (slope<1.5):
        [xl, yl1, yl2, yl3, yl4, yl5, yl6]=[0.10, 0.90, 0.85, 0.80, 0.75, 0.70, 0.65]
    if (slope>=1.5):
        [xl, yl1, yl2, yl3, yl4, yl5, yl6]=[0.50, 0.35, 0.30, 0.25, 0.20, 0.15, 0.10]
    #
    linY=np.polyval(coeffs1,selected_df[xkey].values.astype('float'))
    #
    pint='rejected'
    if (xkey=='trm1_star') & (ykey=='nrm'):
        if (len(aftrm1)>0) & (len(aftrm2)>0):
            slope_t=float(spec_prv_df.loc[sid,'slope_TRM1-TRM2*'])
            if ('reg' in criteria) & (r>=minR) & (frac>=minFrac) \
                & (slope_t>=minSlopeT) & (slope_t<=maxSlopeT):
                pint='%7.2f'%(slope*lab_field*1e6)+' $\mu$T'
            if ('krv' in criteria) & (beta<=maxBeta) & (f_resid<=maxFresid) & (krv_dash<=maxKrv):
                pint='%7.2f'%(slope*lab_field*1e6)+' $\mu$T'
    ax.set_title(sid)
    #if (xkey=='trm1_star') & (ykey=='nrm'):
    #    ax.set_title(sid+' (B$_{anc}$=%7.2f'%(slope*lab_field*1e6)+' $\mu$T)')
    #else: ax.set_title(sid)
    ax.set_xlim(0,xymax)
    ax.set_ylim(0,xymax)
    ax.set_xticks(tick)
    ax.set_yticks(tick)
    if (xkey=='trm1_star') & (ykey=='nrm'):
        ax.set_xlabel('TRM1*'+unit)
        ax.set_ylabel('NRM'+unit)
    if (xkey=='trm2_star') & (ykey=='trm1'):
        ax.set_xlabel('TRM2*'+unit)
        ax.set_ylabel('TRM1'+unit)
    ax.plot([0,xymax],[0,xymax],color='g',linestyle='dotted')
    #
    ax.plot(df1[xkey]*fac, df1[ykey]*fac, 'wo', markeredgecolor='black')
    ax.plot(selected_df[xkey]*fac, selected_df[ykey]*fac, 'ko')
    ax.plot(fac*selected_df[xkey].values.astype('float'),fac*linY,'r-')
    #
    ax.text(xl, yl1,'slope= %5.3f'%(slope)+'$\pm$%5.3f'%(stderr),\
        horizontalalignment='left', verticalalignment='center', transform = ax.transAxes)
    #ax.text(xl, yl2,'r= %5.3f'%(r)+', k\'= %5.3f'%(krv_dash)+', N = '+str(n),\
    ax.text(xl, yl2,'r= %5.3f'%(r)+', N= '+str(n),\
        horizontalalignment='left', verticalalignment='center', transform = ax.transAxes)
    ax.text(xl, yl3,'FRAC= '+'%5.3f'%(frac)+', $\Delta$AIC= '+'%5.1f'%(dAIC),\
        horizontalalignment='left', verticalalignment='center', transform = ax.transAxes)
    #ax.text(xl, yl4,'$\Delta$AIC = '+'%5.1f'%(dAIC),\
    ax.text(xl, yl4,'k\'= %5.3f'%(krv_dash)+' (k= %5.3f'%(krv)+')',\
        horizontalalignment='left', verticalalignment='center', transform = ax.transAxes)
    if (xkey=='trm1_star') & (ykey=='nrm'):
        ax.text(xl, yl5,'B$_{anc}$= '+pint,\
            horizontalalignment='left', verticalalignment='center', transform = ax.transAxes)
    ax.text(xl, yl6,'('+str(int(step_min))+'-'+str(int(step_max))+' mT)',\
        horizontalalignment='left', verticalalignment='center', transform = ax.transAxes)
    #
    return slope,r,n,frac,dAIC,krv,krv_dash,f_resid,pint

def plot_xrm_xrm2_r2(sid,ax,df,afxrm,xkey,ykey,step_min,step_max):
    #
    fac=1e6
    unit=' $\mu$Am$^2$/kg'
    #
    #fac=1e3
    #unit=' mAm$^2$/kg'
    #
    n,slope,b,r,stderr,coeffs1,coeffs2,dAIC,frac,beta,krv,krv_dash,f_resid,selected_df =\
        ltd_pars(df,afxrm,step_min,step_max,xkey,ykey)
    if 'trm1' in xkey:
        xymax=1.1*fac*np.array([[df['trm1'].max(),df['nrm'].max()]]).max()
    if 'trm2' in xkey:
        xymax=1.1*fac*np.array([[df['trm1'].max(),df['trm2'].max(),df['nrm'].max()]]).max()
    if 'arm1' in xkey:
        xymax=1.1*fac*np.array([[df['arm0'].max(),df['arm1'].max()]]).max()
    if 'arm2' in xkey:
        xymax=1.1*fac*np.array([[df['arm0'].max(),df['arm1'].max(),df['arm2'].max()]]).max()
    tick=[float('{:.1e}'.format(xymax*(i+1)/4)) for i in range(4)]
    if (slope<1.5):
        [xl, yl1, yl2, yl3, yl4]=[0.10, 0.90, 0.85, 0.80, 0.75]
    if (slope>=1.5):
        [xl, yl1, yl2, yl3, yl4]=[0.50, 0.20, 0.15, 0.10, 0.05]
    ax.set_title(sid)
    ax.set_xlabel(xkey.upper()+unit)
    ax.set_ylabel(ykey.upper()+unit)
    ax.set_xlim(0,xymax)
    ax.set_ylim(0,xymax)
    ax.set_xticks(tick)
    ax.set_yticks(tick)
    #
    if ykey!='nrm':
        ax.plot([0,xymax],[0,xymax],color='g',linestyle='dotted')
    ax.plot(df[xkey]*fac,df[ykey]*fac,'wo',markeredgecolor='black')
    ax.plot(selected_df[xkey]*fac,selected_df[ykey]*fac,'ko')
    #
    ax.text(xl, yl1,'slope= %5.3f'%(slope)+'$\pm$%5.3f'%(stderr),\
        horizontalalignment='left', verticalalignment='center',\
        transform = ax.transAxes)
    ax.text(xl, yl2,'r= %5.3f'%(r)+', N = '+str(n),\
        horizontalalignment='left', verticalalignment='center',\
        transform = ax.transAxes)
    ax.text(xl, yl3,'k\'= %5.3f'%(krv_dash)+' (k= %5.3f'%(krv)+')',\
        horizontalalignment='left', verticalalignment='center',\
        transform = ax.transAxes)
    ax.text(xl, yl4,'('+str(int(step_min))+'-'+str(int(step_max))+' mT)',\
        horizontalalignment='left', verticalalignment='center',\
        transform = ax.transAxes)
    #
    return slope,r,n,krv,krv_dash

def plot_zijd(sid, sid_data, ax1, ax2, df, step_min, step_max):
    #
    # ax1 for equal-size, ax2 for close-up
    #
    whole=df
    used=df[(df.treat_ac_field_mT>=step_min)&(df.treat_ac_field_mT<=step_max)]
    xrm0=df.magn_mass.tolist()[0]
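    # intensities are normalized by the first (post-LTD) measurement; the pre-LTD
    # NRM0 step, selected next, can therefore plot above 1.0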
    df0=sid_data[sid_data.description.str.contains('NRM0')]
    if (len(df0.index)>0):
        df0['meas_norm']=df0['magn_mass']/xrm0
        pre_LTD=df0
    #
    ## PCA calculation
    pca_block=used[['treat_ac_field_mT','dir_dec','dir_inc','meas_norm']]
    pca_block['quality']='g'
    pca_block=pca_block[['treat_ac_field_mT','dir_dec','dir_inc','meas_norm','quality']].values.tolist()
    pca_result=pmag.domean(pca_block, 0, len(pca_block)-1, 'DE-BFL')
    #print(pca_result)
    pca_dec=pca_result['specimen_dec']
    pca_inc=pca_result['specimen_inc']
    pca_mad=pca_result['specimen_mad']
    pca_n=pca_result['specimen_n']
    #
    # title, label
    interval='('+str(int(step_min))+'-'+str(int(step_max))+' mT)'
    ax1.set_title(sid+' '+interval)
    ax2.set_title(sid+' '+interval)
    PCA='PCA: Dec= %7.1f'%(pca_dec)+', Inc= %7.1f'%(pca_inc)+', MAD= %8.2f'%(pca_mad)+', N= %2d'%(pca_n)
    ax1.set_xlabel(PCA)
    ax2.set_xlabel(PCA)
    #
    ## plot pre-LTD interval
    if len(list(df0.index))>0:
        xrm0=pre_LTD[['dir_dec','dir_inc','meas_norm']].values
        xyz_0=pmag.dir2cart(xrm0).transpose()
        ax1.plot(xyz_0[0],-xyz_0[1],color='grey',marker='o')
        ax1.plot(xyz_0[0],-xyz_0[2],color='grey',marker='s')
        ax2.plot(xyz_0[0],-xyz_0[1],color='grey',marker='o')
        ax2.plot(xyz_0[0],-xyz_0[2],color='grey',marker='s')
    #
    ## plot whole interval
    if len(list(whole.index))>0:
        zblock=whole[['dir_dec','dir_inc','meas_norm']]
        xyz_wl=pmag.dir2cart(zblock).transpose()
        ax1.plot(xyz_wl[0],-xyz_wl[1],color='grey',marker='o')
        ax1.plot(xyz_wl[0],-xyz_wl[2],color='grey',marker='s')
        ax2.plot(xyz_wl[0],-xyz_wl[1],color='grey',marker='o')
        ax2.plot(xyz_wl[0],-xyz_wl[2],color='grey',marker='s')
    #
    ## plot used interval
    zblock=used[['dir_dec','dir_inc','meas_norm']]
    XYZ=pmag.dir2cart(zblock).transpose()
    ax1.plot(XYZ[0],-XYZ[1],'ko')
    ax1.plot(XYZ[0],-XYZ[2],'ws',markeredgecolor='blue')
    ax1.plot(XYZ[0],-XYZ[1],'k-')
    ax1.plot(XYZ[0],-XYZ[2],'k-')
    ax2.plot(XYZ[0],-XYZ[1],'ko')
    ax2.plot(XYZ[0],-XYZ[2],'ws',markeredgecolor='blue')
    ax2.plot(XYZ[0],-XYZ[1],'k-')
    ax2.plot(XYZ[0],-XYZ[2],'k-')
    #
    # put on best fit line
    Rstart=np.sqrt((XYZ[0][0])**2+(XYZ[1][0])**2+(XYZ[2][0])**2)
    Rstop=np.sqrt((XYZ[0][-1])**2+(XYZ[1][-1])**2+(XYZ[2][-1])**2)
    XYZ_start=pmag.dir2cart([pca_dec,pca_inc,Rstart])
    XYZ_stop=-1*pmag.dir2cart([pca_dec,pca_inc,Rstop])
    ax1.plot([XYZ_start[0],XYZ_stop[0]],[-XYZ_start[1],-XYZ_stop[1]],'r-')
    ax1.plot([XYZ_start[0],XYZ_stop[0]],[-XYZ_start[2],-XYZ_stop[2]],'r-')
    ax2.plot([XYZ_start[0],XYZ_stop[0]],[-XYZ_start[1],-XYZ_stop[1]],'r-')
    ax2.plot([XYZ_start[0],XYZ_stop[0]],[-XYZ_start[2],-XYZ_stop[2]],'r-')
    #
    # get max and min
    [xmax,xmin,ymax,ymin]=[0,0,0,0]
    if len(list(df0.index))>0:
        xmax=np.max([xyz_0[0].max(),xyz_wl[0].max()])
        xmin=np.min([xyz_0[0].min(),xyz_wl[0].min()])
        ymax=np.max([(-xyz_0[1]).max(),(-xyz_0[2]).max(),\
            (-xyz_wl[1]).max(),(-xyz_wl[2]).max()])
        ymin=np.min([(-xyz_0[1]).min(),(-xyz_0[2]).min(),\
            (-xyz_wl[1]).min(),(-xyz_wl[2]).min()])
    else:
        xmax=np.max([xyz_wl[0].max()])
        xmin=np.min([xyz_wl[0].min()])
        ymax=np.max([(-xyz_wl[1]).max(),(-xyz_wl[2]).max()])
        ymin=np.min([(-xyz_wl[1]).min(),(-xyz_wl[2]).min()])
    #print(xmin, xmax)
    #print(ymin, ymax)
    [xlength, ylength]=[xmax-xmin, ymax-ymin]
    xylength=max(xlength, ylength)
    #
    # plot size adjustment for ax1
    div=2
    tick1=[float('{:.1e}'.format(-xylength*(i+1)/div)) for i in range(div)]
    tick2=[0.0]
    tick3=[float('{:.1e}'.format(xylength*(i+1)/div)) for i in range(div)]
    tick=tick1 + tick2 + tick3
    ax1.plot([-xylength*1.1, xylength*1.1],[0,0],'k-')
    ax1.set_xlim(-xylength*1.1, xylength*1.1)
    ax1.plot([0,0], [-xylength*1.1, xylength*1.1],'k-')
    ax1.set_ylim(-xylength*1.1, xylength*1.1)
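    # symmetric x/y limits keep the horizontal and vertical projections on a common
    # scale in the equal-size panel, so the two components remain directly comparable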
    ax1.set_xticks(tick)
    ax1.set_yticks(tick)
    #
    # plot size adjustment for ax2
    if xmin>0:
        ax2.plot([-xlength*0.1, xmax+xlength*0.1],[0,0],'k-')
        ax2.set_xlim(-xlength*0.1, xmax+xlength*0.1)
    if xmin<0:
        if xmax<0:
            ax2.plot([xmin-xlength*0.1, xlength*0.1],[0,0],'k-')
            ax2.set_xlim(xmin-xlength*0.1, xlength*0.1)
        if xmax>0:
            ax2.plot([xmin-xlength*0.1, xmax+xlength*0.1],[0,0],'k-')
            ax2.set_xlim(xmin-xlength*0.1, xmax+xlength*0.1)
    if ymin>0:
        ax2.plot([0,0], [-ylength*0.1, ymax+ylength*0.1],'k-')
        ax2.set_ylim(-ylength*0.1, ymax+ylength*0.1)
    if ymin<0:
        if ymax<0:
            ax2.plot([0,0], [ymin-ylength*0.1, ylength*0.1],'k-')
            ax2.set_ylim(ymin-ylength*0.1, ylength*0.1)
        if ymax>0:
            ax2.plot([0,0], [ymin-ylength*0.1, ymax+ylength*0.1],'k-')
            ax2.set_ylim(ymin-ylength*0.1, ymax+ylength*0.1)
    #
    return pca_dec, pca_inc, pca_mad, pca_n

def prep_sid_df(xrm_types, df):
    # subtract the last treatment step from the others for all types
    # set afxrm data for all types (afnrm, afarm0, ...)
    for t in xrm_types:
        if t=='NRM':  afnrm, sd_diff_n =set_NTRM_data(df,t)
        if t=='TRM1': aftrm1,sd_diff_t1=set_NTRM_data(df,t)
        if t=='TRM2': aftrm2,sd_diff_t2=set_NTRM_data(df,t)
        if t=='ARM0': afarm0,sd_diff_a0=set_ARM_data(df,t)
        if t=='ARM1': afarm1,sd_diff_a1=set_ARM_data(df,t)
        if t=='ARM2': afarm2,sd_diff_a2=set_ARM_data(df,t)
    print('NRM: ',len(afnrm),' data, TRM1:',len(aftrm1),' data, TRM2:',len(aftrm2),' data')
    print('ARM0: ',len(afarm0),' data, ARM1:',len(afarm1),' data, ARM2:',len(afarm2),' data')
    # set data for bi-plot: merged by the treatment steps against each other
    if (len(afnrm)>0):
        sid0_df=afnrm[['treat_ac_field_mT','magn_mass_diff']]
        sid0_df.columns=['treat','nrm']
        sid_df=sid0_df
        sid_data_diff=sd_diff_n
    if (len(afarm0)>0):
        sid0_df=afarm0[['treat_ac_field_mT','magn_mass_diff']]
        sid0_df.columns=['treat','arm0']
        sid_df=sid_df[['treat','nrm']].merge(\
            sid0_df[['treat','arm0']], on='treat')
        sid_data_diff=pd.concat([sid_data_diff,sd_diff_a0])
    if (len(aftrm1)>0):
        sid0_df=aftrm1[['treat_ac_field_mT','magn_mass_diff']]
        sid0_df.columns=['treat','trm1']
        sid_df=sid_df[['treat','nrm','arm0']].merge(\
            sid0_df[['treat','trm1']], on='treat')
        sid_data_diff=pd.concat([sid_data_diff,sd_diff_t1])
    if (len(afarm1)>0):
        sid0_df=afarm1[['treat_ac_field_mT','magn_mass_diff']]
        sid0_df.columns=['treat','arm1']
        sid_df=sid_df[['treat','nrm','arm0','trm1']].merge(\
            sid0_df[['treat','arm1']], on='treat')
        sid_data_diff=pd.concat([sid_data_diff,sd_diff_a1])
    if (len(aftrm2)>0):
        sid0_df=aftrm2[['treat_ac_field_mT','magn_mass_diff']]
        sid0_df.columns=['treat','trm2']
        sid_df=sid_df[['treat','nrm','arm0','trm1','arm1']].merge(\
            sid0_df[['treat','trm2']], on='treat')
        sid_data_diff=pd.concat([sid_data_diff,sd_diff_t2])
    if (len(afarm2)>0):
        sid0_df=afarm2[['treat_ac_field_mT','magn_mass_diff']]
        sid0_df.columns=['treat','arm2']
        sid_df=sid_df[['treat','nrm','arm0','trm1','arm1','trm2']].merge(\
            sid0_df[['treat','arm2']], on='treat')
        sid_data_diff=pd.concat([sid_data_diff,sd_diff_a2])
    last_treat=sid_df.treat.max()
    # need to peel off the last step for division step
    sid_df=sid_df[sid_df.treat<last_treat]
    # calculate TRM1* and TRM2*
    if (len(aftrm1)>0) & (len(afarm0)>0) & (len(afarm1)>0):
        sid_df['trm1_star']=sid_df['trm1']*(sid_df['arm0']/sid_df['arm1'])
    if (len(aftrm2)>0) & (len(afarm1)>0) & (len(afarm2)>0):
        sid_df['trm2_star']=sid_df['trm2']*(sid_df['arm1']/sid_df['arm2'])
    # put the last treatment step back in (as zero)
    last_df=pd.DataFrame([np.zeros(len(list(sid_df.columns)))])
    last_df.columns=sid_df.columns
    last_df['treat']=last_treat
    new_df=pd.concat((sid_df,last_df))
    new_df.reset_index(inplace=True,drop=True)
    sid_df=new_df
    #
    return sid_data_diff,sid_df,afnrm,aftrm1,aftrm2,afarm0,afarm1,afarm2

def set_ARM_data(df,rem_type):
    """
    choose and calculate ARM data (except pre-LTD 0 data) from the input data

    Parameters
    _________
    df : dataframe of measurement data
    rem_type : remanence type

    Returns
    ________
    afxrm : XRM data with "meas_norm" column
    df3 : with base-vector-subtracted data
    """
    XRM0 = str(rem_type) + '0'
    df2=subtract_base_vector(df,rem_type)
    df3=df2[df2.description.str.contains(rem_type)]
    afxrm=df3
    if (len(afxrm)>0):
        meas0=afxrm.magn_mass_diff.tolist()[0]
        afxrm['meas_norm']=afxrm['magn_mass_diff']/meas0
        afxrm=afxrm.loc[afxrm.method_codes.str.contains('LT-LT-Z')==False]
        afxrm=df2[df2.description.str.contains(rem_type)]
        afxrm=afxrm[afxrm.description.str.contains(XRM0)==False]
        meas0=afxrm.magn_mass_diff.tolist()[0]
        afxrm['meas_norm']=afxrm['magn_mass_diff']/meas0
    return afxrm,df3

def set_NTRM_data(df,rem_type):
    """
    choose and calculate NTRM data from the input data

    Parameters
    _________
    df : dataframe of measurement data
    rem_type : remanence type

    Returns
    ________
    afxrm : XRM data with "meas_norm" column
    df3 : with base-vector-subtracted data
    """
    XRM0 = str(rem_type) + '0'
    df2=subtract_base_vector(df,rem_type)
    df3=df2[df2.description==rem_type]
    df4=df2[df2.description.str.contains(XRM0)==True]
    df5=pd.concat([df3,df4])
    #df5.to_csv('_temp.csv',index=True)
    afxrm=df3
    if (len(afxrm)>0):
        afxrm=afxrm[afxrm.description.str.contains(XRM0)==False]
        meas0=afxrm.magn_mass.tolist()[0] # get first measurement (after LTD)
        afxrm['meas_norm']=afxrm['magn_mass']/meas0 # normalized by first measurement
    return afxrm,df5

def vds(xyz):
    # vector difference sum: total length of the demagnetization path,
    # summed over successive vector differences
    R=0
    cart=xyz.transpose()
    for i in range(xyz.shape[1]-1):
        diff=[cart[i][0]-cart[i+1][0],cart[i][1]-cart[i+1][1],cart[i][2]-cart[i+1][2]]
        dirdiff=pmag.cart2dir(diff)
        R+=dirdiff[2]
    return R

def wrapper_ltd_pars_mod(args):
    return ltd_pars_mod(*args)

def wrapper_zijd_PCA_calc(args):
    return zijd_PCA_calc(*args)

def zijd_PCA_calc(df,start,end):
    #
    used=df[(df.treat_ac_field_mT>=start)&(df.treat_ac_field_mT<=end)]
    pca_block=used[['treat_ac_field_mT','dir_dec','dir_inc','meas_norm']]
    pca_block['quality']='g'
    pca_block=pca_block[['treat_ac_field_mT','dir_dec','dir_inc','meas_norm','quality']].values.tolist()
    pca_result=pmag.domean(pca_block, 0, len(pca_block)-1, 'DE-BFL')
    mad=pca_result['specimen_mad']
    dang=pca_result['specimen_dang']
    spec_n=pca_result['specimen_n']
    step_min=pca_result['measurement_step_min']
    step_max=pca_result['measurement_step_max']
    #print('%5.1f'%(step_min) + ' - %5.1f'%(step_max) + ' mT : MAD= %5.2f'%(mad) \
    #    + ', DANG= %5.2f'%(dang) + ', N= %2d'%(spec_n))
    #
    return step_min,step_max,mad,dang,spec_n
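# minimal usage sketch, assuming a Tsunakawa-Shaw '.ts' input file formatted as
# expected by convert_ts_dspin (4 header lines, then XRM/step/moment columns);
# the file name, citation, instrument code and ARM bias field below are
# hypothetical placeholders, not values prescribed by this module
if __name__ == '__main__':
    meas, spec, samp = convert_ts_dspin('SP01-1-1.ts', 'This study',
                                        'DSPIN', ARM_DC_field=50e-6)
    # write the converted tables out as tab-delimited MagIC-style files
    meas.to_csv('measurements.txt', sep='\t', index=False)
    spec.to_csv('specimens.txt', sep='\t', index=False)
    samp.to_csv('samples.txt', sep='\t', index=False)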
41.79798
168
0.616381
import matplotlib as mpl import matplotlib.pyplot as plt import multiprocessing as multi import numpy as np import os import pandas as pd import pmagpy import pmagpy.ipmag as ipmag import pmagpy.pmag as pmag import pmagpy.pmagplotlib as pmagplotlib import re import scipy.integrate as integrate import scipy.stats as stats import seaborn as sns import SPD.lib.leastsq_jacobian as lib_k import sys from datetime import datetime as dt from importlib import reload from multiprocessing import Pool from scipy.stats import linregress def API_param_combine(sid_df,afnrm,aftrm1,trm1_star_min,minN): # ## calculating first heating parameters ntrmRegs1=[] # used_df=sid_df[sid_df.treat>=trm1_star_min] used_df=used_df[['treat','nrm','trm1_star']] trm1_star_max=used_df['treat'].tolist()[len(used_df)-1] variables = [] for i in range(len(used_df)-minN+1): for j in range(len(used_df)-minN+1-i): variables = variables + [[used_df, afnrm,\ used_df['treat'].tolist()[i],\ used_df['treat'].tolist()[i+j+minN-1],'trm1_star','nrm']] p=Pool(multi.cpu_count()) ntrmRegs1=pd.DataFrame(p.map(wrapper_ltd_pars_mod, variables)) ntrmRegs1.columns=['n_n','slope_n','r_n','dAIC_n','frac_n',\ 'step_min_n','step_max','beta_n','krv_n','krvd_n','f_resid_n'] p.close() p.terminate() print('[calculated for', len(ntrmRegs1),\ 'step-combinations for 1st heating parameters',\ '(', trm1_star_min, '-', trm1_star_max, 'mT)]') #print(ntrmRegs1) # ## calculating second heating parameters trmRegs1=[] # interval serach from ZERO up to MAX trm2_star_min=sid_df['treat'].tolist()[0] used_df=sid_df[sid_df.treat>=trm2_star_min] used_df=used_df[['treat','trm1','trm2_star']] trm2_star_max=used_df['treat'].tolist()[len(used_df)-1] variables = [] for i in range(len(used_df)-minN+1): for j in range(len(used_df)-minN+1-i): variables = variables + [[used_df, aftrm1,\ used_df['treat'].tolist()[i],\ used_df['treat'].tolist()[i+j+minN-1],'trm2_star','trm1']] p=Pool(multi.cpu_count()) trmRegs1=pd.DataFrame(p.map(wrapper_ltd_pars_mod, variables)) trmRegs1.columns=['n_t','slope_t','r_t','dAIC_t','frac_t',\ 'step_min_t','step_max','beta_t','krv_t','krvd_t','f_resid_t'] p.close() p.terminate() print('[calculated for', len(trmRegs1),\ 'step-combinations for 2nd heating parameters',\ '(', trm2_star_min, '-', trm2_star_max, 'mT)]') #print(trmRegs1) # print('[merge the combinations for H_min_n >= H_min_t with common H_max]') combinedRegs0=[] combinedRegs0=pd.merge(ntrmRegs1, trmRegs1, on='step_max', how='outer') combinedRegs1=[] combinedRegs1=combinedRegs0[combinedRegs0.step_min_n>=combinedRegs0.step_min_t] print(' ', len(combinedRegs0), ' cominbnations --> ',\ len(combinedRegs1), ' cominbnations') #print(combinedRegs1) # ## calculating dAPI(difference between resultantAPI/expectedAPI) #aftrm10=sid_data[sid_data.description.str.contains('TRM10')] # set lab_field #if (len(aftrm10)>0): lab_field=aftrm10.treat_dc_field.tolist()[0] #combinedRegs1['dAPI']=abs(1 - combinedRegs1['slope_n'] * lab_field / True_API) #print(combinedRegs1) #screened=combinedRegs1 # return combinedRegs1 def clean_duplicates(df,type): clean_df=df[ ((df['step']==0) &(df.XRM==type) )==False] duplicate=df[ ((df['step']==0) &(df.XRM==type) )==True].tail(1) df=pd.concat((clean_df,duplicate)) df.sort_values(by='number',inplace=True) return df def convert_ts_dspin(infile, citations, instrument, ARM_DC_field): # info=pd.read_csv(infile,nrows=4,header=None)[0] weightline=info.loc[info.str.contains('weight')==True] #weight_gm=float(weightline.str.split().values[-1][-1][:-1]) 
weight_gm=float(re.findall("\d+\.\d+", str(weightline))[0]) IDline=info.loc[info.str.contains('\$')==True].str.split().values[-1] specimen,azimuth,dip,lab_field_uT=IDline[1],float(IDline[2]),float(IDline[3]),float(IDline[4]) site=specimen.split('-')[0] sample=site+'-'+specimen.split('-')[1] # columns=['XRM','step','magn_mass','dir_inc','dir_dec','Smag'] lab_field=lab_field_uT*1e-6 # convert from uT to T data=pd.read_csv(infile,delim_whitespace=True,header=None,skiprows=4) data.columns=columns data['dir_dec']=data['dir_dec']%360 data=data[data.XRM.str.contains('#')==False] # # set some defaults data['description']="" data['specimen']=specimen data['sample']=sample # assume specimen=sample data['site']=site data['weight'],weight=weight_gm*1e-3,weight_gm*1e-3 # use weight in kg data['azimuth']=azimuth data['dip']=dip data['treat_temp']=273. data['treat_ac_field']=data['step']*1e-3 # convert mT to T data['treat_dc_field']=0 data['treat_dc_field_phi']="" data['treat_dc_field_theta']="" data['meas_temp']=273. data['citations']=citations data['software_packages'],version=pmag.get_version(),pmag.get_version() data['instrument_codes']=instrument data['standard']='u' # set to unknown data['quality']='g' # set to good as default methstring='LP-PI-TRM:LP-PI-ALT-AFARM:LP-LT' data['method_codes']=methstring # data=data[((data['step']!=0) & (data.XRM=='ARM00'))==False] # delete all but first ARM00 data=data[((data['step']!=0) & (data.XRM=='ARM10'))==False] # delete all but first ARM10 data=data[((data['step']!=0) & (data.XRM=='ARM20'))==False] # delete all but first ARM20 ## delete the extra step 0 steps for ARM0, ARM1 & ARM2 data['number'] = range(len(data)) # data=clean_duplicates(data,'ARM0') data=clean_duplicates(data,'ARM1') data=clean_duplicates(data,'ARM2') data=clean_duplicates(data,'TRM10') # add descriptions for plotting data.loc[(data.XRM.str.contains('NRM')==True),'description']='NRM' data.loc[(data.XRM.str.contains('NRM0')==True),'description']='NRM0' data.loc[(data.XRM.str.contains('ARM0')==True),'description']='ARM0' data.loc[(data.XRM.str.contains('ARM00')==True),'description']='ARM00' data.loc[(data.XRM.str.contains('TRM1')==True),'description']='TRM1' data.loc[(data.XRM.str.contains('TRM10')==True),'description']='TRM10' data.loc[(data.XRM.str.contains('ARM1')==True),'description']='ARM1' data.loc[(data.XRM.str.contains('ARM10')==True),'description']='ARM10' data.loc[(data.XRM.str.contains('TRM2')==True),'description']='TRM2' data.loc[(data.XRM.str.contains('TRM20')==True),'description']='TRM20' data.loc[(data.XRM.str.contains('ARM2')==True),'description']='ARM2' data.loc[(data.XRM.str.contains('ARM20')==True),'description']='ARM20' # ARM0_step=data[ (data.XRM.str.contains('ARM0')==True)].head(1) if (len(ARM0_step)>0): ARM0_phi=ARM0_step['dir_dec'].values[0] ARM0_theta=ARM0_step['dir_inc'].values[0] # TRM1_step=data[ (data.XRM.str.contains('TRM1')==True)].head(1) if (len(TRM1_step)>0): TRM1_phi=TRM1_step['dir_dec'].values[0] TRM1_theta=TRM1_step['dir_inc'].values[0] # ARM1_step=data[ (data.XRM.str.contains('ARM1')==True)].head(1) if (len(ARM1_step)>0): ARM1_phi=ARM1_step['dir_dec'].values[0] ARM1_theta=ARM1_step['dir_inc'].values[0] # TRM2_step=data[ (data.XRM.str.contains('TRM2')==True)].head(1) if (len(TRM2_step)>0): TRM2_phi=TRM2_step['dir_dec'].values[0] TRM2_theta=TRM2_step['dir_inc'].values[0] # ARM2_step=data[ (data.XRM.str.contains('ARM2')==True)].head(1) if (len(ARM2_step)>0): ARM2_phi=ARM2_step['dir_dec'].values[0] ARM2_theta=ARM2_step['dir_inc'].values[0] # # add in method 
codes # NRM LTD demag data.loc[(data.XRM.str.contains('NRM0')==True),'method_codes']=\ 'LT-NO:LP-DIR-AF:'+methstring data.loc[((data['step']==0) &(data.XRM=='NRM')),'method_codes']=\ 'LT-LT-Z:LP-DIR-AF:'+methstring data.loc[((data['step']!=0) &(data.XRM=='NRM')),'method_codes']=\ 'LT-AF-Z:LP-DIR-AF:LT-AF-Z-TUMB:'+methstring # ARM0 LTD DEMAG data.loc[(data.XRM.str.contains('ARM00')==True),'method_codes']=\ 'LT-AF-I:LT-NRM-PAR:LP-ARM-AFD:'+methstring data.loc[((data['step']==0) &(data.XRM=='ARM0')),'method_codes']=\ 'LT-AF-I:LT-NRM-PAR:LT-LT-Z:LP-ARM-AFD:'+methstring data.loc[((data['step']!=0) &(data.XRM=='ARM0')),'method_codes']=\ 'LT-AF-Z:LP-ARM-AFD:LT-AF-Z-TUMB:'+methstring # TRM1 LTD DEMAG data.loc[(data.XRM.str.contains('TRM10')==True),'method_codes']=\ 'LT-T-I:LP-TRM-AFD:'+methstring data.loc[((data['step']==0) &(data.XRM=='TRM1')),'method_codes']=\ 'LT-LT-Z:LP-TRM-AFD:'+methstring data.loc[((data['step']!=0) &(data.XRM=='TRM1')),'method_codes']=\ 'LT-AF-Z:LP-TRM-AFD:LT-AF-Z-TUMB:'+methstring # ARM1 LTD DEMAG data.loc[(data.XRM.str.contains('ARM10')==True),'method_codes']=\ 'LT-AF-I:LT-TRM-PAR:LP-ARM-AFD:'+methstring data.loc[((data['step']==0) &(data.XRM=='ARM1')),'method_codes']=\ 'LT-AF-I:LT-TRM-PAR:LT-LT-Z:LP-ARM-AFD:'+methstring data.loc[((data['step']!=0) &(data.XRM=='ARM1')),'method_codes']=\ 'LT-AF-Z:LP-ARM-AFD:LT-AF-Z-TUMB:'+methstring # TRM2 LTD DEMAG data.loc[(data.XRM.str.contains('TRM20')==True),'method_codes']=\ 'LT-T-I:LP-TRM-AFD:'+methstring data.loc[((data['step']==0) &(data.XRM=='TRM2')),'method_codes']=\ 'LT-LT-Z:LP-TRM-AFD:'+methstring data.loc[((data['step']!=0) &(data.XRM=='TRM2')),'method_codes']=\ 'LT-AF-Z:LP-TRM-AFD:LT-AF-Z-TUMB:'+methstring # ARM2 LTD DEMAG data.loc[(data.XRM.str.contains('ARM20')==True),'method_codes']=\ 'LT-AF-I:LT-TRM-PAR:LP-ARM-AFD:'+methstring data.loc[((data['step']==0) &(data.XRM=='ARM2')),'method_codes']=\ 'LT-AF-I:LT-TRM-PAR:LT-LT-Z:LP-ARM-AFD:'+methstring data.loc[((data['step']!=0) &(data.XRM=='ARM2')),'method_codes']=\ 'LT-AF-Z:LP-ARM-AFD:LT-AF-Z-TUMB:'+methstring # data['experiment'],experiment=specimen+':'+methstring,specimen+':'+methstring # # reset lab field directions to TRM direction for TRM steps data.loc[(data.method_codes.str.contains('LT-T-I')==True),'treat_dc_field']=lab_field if (len(TRM1_step)>0): data.loc[( (data.method_codes.str.contains('LT-T-I')==True)&\ (data.description.str.contains('TRM1'))),'treat_dc_field_phi']=TRM1_phi data.loc[((data.method_codes.str.contains('LT-T-I')==True)&\ (data.description.str.contains('TRM1'))),'treat_dc_field_theta']=TRM1_theta if (len(TRM2_step)>0): data.loc[( (data.method_codes.str.contains('LT-T-I')==True)&\ (data.description.str.contains('TRM2'))),'treat_dc_field_phi']=TRM2_phi data.loc[((data.method_codes.str.contains('LT-T-I')==True)&\ (data.description.str.contains('TRM2'))),'treat_dc_field_theta']=TRM2_theta # # reset lab field directions to ARM direction for ARM steps data.loc[(data.method_codes.str.contains('LT-AF-I')==True),'treat_dc_field']=ARM_DC_field if (len(ARM0_step)>0): data.loc[( (data.method_codes.str.contains('LT-AF-I')==True)&\ (data.description.str.contains('ARM0'))),'treat_dc_field_phi']=ARM0_phi data.loc[((data.method_codes.str.contains('LT-AF-I')==True)&\ (data.description.str.contains('ARM0'))),'treat_dc_field_theta']=ARM0_theta # if (len(ARM1_step)>0): data.loc[( (data.method_codes.str.contains('LT-AF-I')==True)&\ (data.description.str.contains('ARM1'))),'treat_dc_field_phi']=ARM1_phi data.loc[((data.method_codes.str.contains('LT-AF-I')==True)&\ 
(data.description.str.contains('ARM1'))),'treat_dc_field_theta']=ARM1_theta # if (len(ARM2_step)>0): data.loc[( (data.method_codes.str.contains('LT-AF-I')==True)&\ (data.description.str.contains('ARM2'))),'treat_dc_field_phi']=ARM2_phi data.loc[((data.method_codes.str.contains('LT-AF-I')==True)&\ (data.description.str.contains('ARM2'))),'treat_dc_field_theta']=ARM2_theta # # temperature of liquid nitrogen data.loc[(data.method_codes.str.contains('LT-LT-Z')==True),'treat_temp']=77 # meas_data=data[['specimen','magn_mass','dir_dec','dir_inc','treat_temp','treat_ac_field',\ 'treat_dc_field','treat_dc_field_phi','treat_dc_field_theta','meas_temp',\ 'citations','number','experiment','method_codes','software_packages',\ 'instrument_codes','standard','quality','description']] meas_data['magn_moment']=meas_data['magn_mass']*weight # meas_data['sequence']=meas_data.index spec_data=pd.DataFrame([{'specimen':specimen,'sample':sample,'weight':weight,\ 'azimuth':0,'dip':0,'experiments':experiment,'result_quality':'g',\ 'method_codes':methstring,'citations':citations,'software_packages':version}]) # spec_data['result_type']='i' spec_data['result_quality']='g' spec_data['description']=" " if azimuth==0 and dip==0: spec_data['dir_tilt_correction']=-1 else: spec_data['dir_tilt_correction']=0 samp_data=spec_data[['sample']] samp_data['site']=site samp_data['azimuth']=0 samp_data['dip']=0 samp_data['orientation_quality']='g' samp_data['description']=\ 'measurements directions corrected with: azimuth='+str(azimuth)+' dip='+str(dip) # # write out the data file return meas_data, spec_data, samp_data def find_best_API_portion_r(combinedRegs1,minFrac,minR,minSlopeT,maxSlopeT): """ Finds the best portion for NRM-TRM1* and TRM1-TRM2* plots by r criteria of Yamamoto+2003 (1) calculate API statistics for all possible coercivity intervals (2) discard the statistics not satisfying the usual selection criteria (when applicable) omitted - (3) sort the statistics by dAPI (rel. departure from the expected API), and select the best 10 statistics (4) sort the statistics by frac_n, and select the best one Curvature (k) calculation is made by the code for Arai plot by Lisa. This is done for inverterd-X (e.g. -TRM1, -ARM1, ..) and original-Y (e.g. NRM, ARM0, ..). The inverted-X is offset (positive) to zero as a minimum. 
revised 2021/09/06 __________ combinedRegs1 : combined API parameters minFrac,minR,minSlopeT,maxSlopeT : thresholds for the r criteria Returns ______ trm1_star_min trm1_star_max trm2_star_min trm2_star_max """ print('[criteria, 2nd heating]') # screened=combinedRegs1[combinedRegs1.frac_t>=minFrac] if (len(screened)>0): print(' Frac_t >=', minFrac, ': ', len(screened),'step-combinations') else: print(' Frac_t >=', minFrac, ': no step-combinations satisfied') screened=combinedRegs1 # screened2=screened[screened.r_t>=minR] if (len(screened2)>0): print(' r_t >=', minR, ': ', len(screened2),'step-combinations') screened=screened2 else: print(' r_t >=', minR, ': no step-combinations satisfied') # screened3=screened[(screened.slope_t>=minSlopeT)\ &(screened.slope_t<=maxSlopeT)] if (len(screened3)>0): print(' ', minSlopeT, '<= slope_t <=', maxSlopeT, \ ': ', len(screened3),'step-combinations') screened=screened3 else: print(' ', minSlopeT, '<= slope_t <=', maxSlopeT, \ ': no step-combinations satisfied') # print('[criteria, 1st heating]') # screened4=screened[screened.frac_n>=minFrac] if (len(screened4)>0): print(' Frac_n >=', minFrac, ': ', len(screened4),'step-combinations') screened=screened4 else: print(' Frac_n >=', minFrac, ': no step-combinations satisfied') # screened5=screened[screened.r_n>=minR] if (len(screened5)>0): print(' r_n >=', minR, ': ', len(screened5),'step-combinations') screened=screened5 else: print(' r_n >=', minR, ': no step-combinations satisfied') ## sort by dAPI, then select top 10 #print('[sort by dAPI and select the top 10 data]') #screened=screened.sort_values('dAPI') #screened=screened.iloc[:10] # # sort by frac_n, then select the best print('[sort by frac_n and select the best step-combination]') screened=screened.sort_values('frac_n', ascending=False) screened_best_fn=screened.iloc[:1] #print(screened) trm2_star_min=screened_best_fn['step_min_t'].iloc[0] trm2_star_max=screened_best_fn['step_max'].iloc[0] trm1_star_min=screened_best_fn['step_min_n'].iloc[0] trm1_star_max=screened_best_fn['step_max'].iloc[0] # return trm1_star_min, trm1_star_max, trm2_star_min, trm2_star_max, screened def find_best_API_portion_k(combinedRegs1,maxBeta,maxFresid,maxKrv): """ Finds the best portion for NRM-TRM1* and TRM1-TRM2* plots by k' criteria of Lloyd+2021 (1) calculate API statistics for all possible coercivity intervals (2) discard the statistics not satisfying the Beta criterion (0.1) and the k' criterion (0.2) omitted - (3) sort the statistics by dAPI (rel. 
departure from the expected API), and select the best 10 statistics (4) sort the statistics by frac_n, and select the best one __________ combinedRegs1 : combined API parameters minFrac,minR,minSlopeT,maxSlopeT : thresholds for the r criteria Returns ______ trm1_star_min trm1_star_max trm2_star_min trm2_star_max """ print('[criteria, 2nd heating]') screened=combinedRegs1 # #screened=combinedRegs1[combinedRegs1.frac_t>=minFrac] #if (len(screened)>0): # print(' Frac_t >=', minFrac, ': ', len(screened),'step-combinations') #else: # print(' Frac_t >=', minFrac, ': no step-combinations satisfied') # screened=combinedRegs1 ## #screened2=screened[screened.krvd_t<=maxKrv] #if (len(screened2)>0): # print(' k\' <=', maxKrv, ': ', len(screened2),'step-combinations') # screened=screened2 #else: # print(' k\' <=', maxKrv, ': no step-combinations satisfied') ## #screened3=screened[(screened.slope_t>=minSlopeT)\ # &(screened.slope_t<=maxSlopeT)] #if (len(screened3)>0): # print(' ', minSlopeT, '<= slope_t <=', maxSlopeT, \ # ': ', len(screened3),'step-combinations') # screened=screened3 #else: # print(' ', minSlopeT, '<= slope_t <=', maxSlopeT, \ # ': no step-combinations satisfied') ## print('[criteria, 1st heating]') # #screened4=screened[screened.frac_n>=minFrac] #if (len(screened4)>0): # print(' Frac_n >=', minFrac, ': ', len(screened4),'step-combinations') # screened=screened4 #else: # print(' Frac_n >=', minFrac, ': no step-combinations satisfied') # screened5=screened[screened.beta_n<=maxBeta] if (len(screened5)>0): print(' beta <=', maxBeta, ': ', len(screened5),'step-combinations') screened=screened5 else: print(' beta <=', maxBeta, ': no step-combinations satisfied') # screened6=screened[screened.f_resid_n<=maxFresid] if (len(screened6)>0): print(' f_resid <=', maxBeta, ': ', len(screened6),'step-combinations') screened=screened6 else: print(' f_resid <=', maxBeta, ': no step-combinations satisfied') # screened7=screened[abs(screened.krvd_n)<=maxKrv] if (len(screened7)>0): print(' abs_k\' <=', maxKrv, ': ', len(screened7),'step-combinations') screened=screened7 else: print(' abs_k\' <=', maxKrv, ': no step-combinations satisfied') ## sort by dAPI, then select top 10 #print('[sort by dAPI and select the top 10 data]') #screened=screened.sort_values('dAPI') #screened=screened.iloc[:10] # sort by frac_n, then select the best print('[sort by frac_n and select the best step-combination]') screened=screened.sort_values('frac_n', ascending=False) screened_fn=screened.iloc[:1] #print(screened) trm2_star_min=screened_fn['step_min_t'].iloc[0] trm2_star_max=screened_fn['step_max'].iloc[0] trm1_star_min=screened_fn['step_min_n'].iloc[0] trm1_star_max=screened_fn['step_max'].iloc[0] # return trm1_star_min, trm1_star_max, trm2_star_min, trm2_star_max, screened def find_mdf(df): """ Finds the median destructive field for AF demag data Parameters __________ df : dataframe of measurements Returns ______ mdf : median destructive field """ mdf_df=df[df.meas_norm<=0.5] mdf_high=mdf_df.treat_ac_field_mT.values[0] mdf_df=df[df.meas_norm>=0.5] mdf_low=mdf_df.treat_ac_field_mT.values[-1] mdf=int(0.5*(mdf_high+mdf_low)) return mdf def ltd_pars(df1,afxrm,step_min,step_max,xkey,ykey): # used1=df1[(df1.treat>=step_min)&(df1.treat<=step_max)] n=len(used1) slope, b, r, p, stderr =\ linregress(used1[xkey].values.astype('float'),\ used1[ykey].values.astype('float')) coeffs1=np.polyfit(used1[xkey].values.astype('float'),used1[ykey].values.astype('float'),1) 
    coeffs2=np.polyfit(used1[xkey].values.astype('float'),used1[ykey].values.astype('float'),2)
    #
    beta=stderr/abs(slope)
    #
    krv=lib_k.AraiCurvature(x=df1[xkey],y=df1[ykey])[0]
    krv_dash=lib_k.AraiCurvature(x=used1[xkey].values.astype('float'),\
                                 y=used1[ykey].values.astype('float'))[0]
    #
    linY=np.polyval(coeffs1,used1[xkey].values.astype('float'))
    curveY=np.polyval(coeffs2,used1[xkey].values.astype('float'))
    chi1, chi2 = (used1[ykey]-linY)**2, (used1[ykey]-curveY)**2
    chi1sum, chi2sum = chi1.sum(), chi2.sum()
    dAIC = n * (np.log(chi1sum) - np.log(chi2sum)) - 2
    #
    used2=afxrm[(afxrm.treat_ac_field_mT>=step_min)&(afxrm.treat_ac_field_mT<=step_max)]
    tblock=used2[['dir_dec','dir_inc','meas_norm']]
    tall=afxrm[['dir_dec','dir_inc','meas_norm']]
    XYZ, XYZall = pmag.dir2cart(tblock).transpose(), pmag.dir2cart(tall).transpose()
    Rused, Rall = vds(XYZ), vds(XYZall)
    frac=Rused/Rall
    #
    y_int = coeffs1[1]
    y_prime = []
    for i in range(0, len(used1[ykey])):
        y_prime.append(0.5 * (used1[ykey].values.astype('float')[i] \
                       + slope * used1[xkey].values.astype('float')[i] + y_int))
    #print(y_prime)
    delta_y_prime = abs(max(y_prime) - min(y_prime))
    f_resid = abs(y_int) / delta_y_prime
    #print('f_resid=',f_resid)
    #
    return n,slope,b,r,stderr,coeffs1,coeffs2,dAIC,frac,beta,krv,krv_dash,f_resid,used1

def ltd_pars_mod(df1,afxrm,step_min,step_max,xkey,ykey):
    #
    n, slope, b, r, stderr, coeffs1, coeffs2, dAIC, frac, beta, krv, krv_dash, f_resid, used1 =\
        ltd_pars(df1,afxrm,step_min,step_max,xkey,ykey)
    #
    return n,slope,r,dAIC,frac,step_min,step_max,beta,krv,krv_dash,f_resid

def opt_interval_first_heating(zijd_min, sid_df, afnrm, minN, minFrac, minR):
    #
    ntrmRegs1=[]
    trm1_star_min=zijd_min
    used_df=sid_df[sid_df.treat>=trm1_star_min]
    used_df=used_df[['treat','nrm','trm1_star']]
    trm1_star_max=used_df['treat'].tolist()[len(used_df)-1]
    variables = []
    for i in range(len(used_df)-minN+1):
        for j in range(len(used_df)-minN+1-i):
            variables = variables + \
                [[used_df, afnrm, used_df['treat'].tolist()[i],\
                  used_df['treat'].tolist()[i+j+minN-1],'trm1_star','nrm']]
    p=Pool(multi.cpu_count())
    ntrmRegs1=pd.DataFrame(p.map(wrapper_ltd_pars_mod, variables))
    ntrmRegs1.columns=['n_n','slope_n','r_n','dAIC_n','frac_n',\
                       'step_min','step_max','beta_n','krv_n','krvd_n','f_resid_n']
    p.close()
    p.terminate()
    #print(ntrmRegs1)
    screened=ntrmRegs1
    screened2=ntrmRegs1[ntrmRegs1.frac_n>=minFrac]
    if (len(screened2)>0):
        screened=screened2
    screened3=screened[ntrmRegs1.r_n>=minR]
    if (len(screened3)>0):
        screened=screened3
    screened=screened.sort_values('dAIC_n')
    screened=screened.iloc[:10]
    #print(screened)
    # decide optimum interval
    trm1_star_min = screened.loc[screened.frac_n.idxmax(), "step_min"]
    trm1_star_max = screened.loc[screened.frac_n.idxmax(), "step_max"]
    print('opt interval NRM-TRM1*: %5.1f'%(trm1_star_min) \
          + ' - %5.1f'%(trm1_star_max) + ' mT')
    #
    return trm1_star_min, trm1_star_max

def opt_interval_second_heating(sid_df, aftrm1, minN, minFrac, minR, minSlopeT, maxSlopeT):
    #
    trmRegs1=[]
    # interval search from ZERO up to MAX
    trm2_star_min=sid_df['treat'].tolist()[0]
    used_df=sid_df[sid_df.treat>=trm2_star_min]
    used_df=used_df[['treat','trm1','trm2_star']]
    trm2_star_max=used_df['treat'].tolist()[len(used_df)-1]
    variables = []
    for i in range(len(used_df)-minN+1):
        for j in range(len(used_df)-minN+1-i):
            variables = variables + [[used_df, aftrm1,\
                  used_df['treat'].tolist()[i],\
                  used_df['treat'].tolist()[i+j+minN-1],'trm2_star','trm1']]
    p=Pool(multi.cpu_count())
    trmRegs1=pd.DataFrame(p.map(wrapper_ltd_pars_mod, variables))
    trmRegs1.columns=['n_t','slope_t','r_t','dAIC_t','frac_t',\
                      'step_min','step_max','beta_t','krv_t','krvd_t','f_resid_t']
    p.close()
    p.terminate()
    #print(trmRegs1)
    screened=trmRegs1[trmRegs1.frac_t>=minFrac]
    screened2=screened[trmRegs1.r_t>=minR]
    if (len(screened2)>0):
        screened=screened2
    screened3=screened[(trmRegs1.slope_t>=minSlopeT)&(trmRegs1.slope_t<=maxSlopeT)]
    if (len(screened3)>0):
        screened=screened3
    screened=screened.sort_values('dAIC_t')
    screened=screened.iloc[:10]
    #print(screened)
    # decide optimum interval
    trm2_star_min = screened.loc[screened.frac_t.idxmax(), "step_min"]
    trm2_star_max = screened.loc[screened.frac_t.idxmax(), "step_max"]
    print('opt interval TRM1-TRM2*: %5.1f'%(trm2_star_min) \
          + ' - %5.1f'%(trm2_star_max) + ' mT')
    #
    return trm2_star_min, trm2_star_max

def opt_interval_zij(afnrm, minN):
    #
    # optimum interval search from ZERO up to MAX
    variables = []
    for i in range(len(afnrm)-minN+1):
        for j in range(len(afnrm)-minN+1-i):
            variables = variables + [[afnrm,\
                  afnrm['treat_ac_field_mT'].tolist()[i],\
                  afnrm['treat_ac_field_mT'].tolist()[i+j+minN-1]]]
    p=Pool(multi.cpu_count())
    zijPCArsts1=pd.DataFrame(p.map(wrapper_zijd_PCA_calc, variables))
    zijPCArsts1.columns=['step_min','step_max','mad','dang','spec_n']
    zijPCArsts1['mad+dang']=zijPCArsts1['mad']+zijPCArsts1['dang']
    p.close()
    p.terminate()
    #print(zijPCArsts1)
    screened=zijPCArsts1.sort_values('mad+dang')
    screened=screened.iloc[:10]
    #print(screened)
    # decide optimum interval
    step_min_mad_min = screened.loc[screened['mad'].idxmin(), "step_min"]
    step_max_mad_min = screened.loc[screened['mad'].idxmin(), "step_max"]
    step_min_dang_min = screened.loc[screened['dang'].idxmin(), "step_min"]
    step_max_dang_min = screened.loc[screened['dang'].idxmin(), "step_max"]
    step_min_opt_zij = step_min_mad_min \
        if step_min_mad_min < step_min_dang_min else step_min_dang_min
    step_max_opt_zij = step_max_mad_min \
        if step_max_mad_min > step_max_dang_min else step_max_dang_min
    print('opt interval Zijderveld: %5.1f'%(step_min_opt_zij)+ ' - %5.1f'%(step_max_opt_zij) + ' mT')
    #
    return step_min_opt_zij, step_max_opt_zij

def plot_af_xrm(sid,sdf,ax,df,rem_type):
    #
    df=df.reset_index()
    #
    if 'ARM' in rem_type:
        xrm0=df.magn_mass_diff.tolist()[0]
        df0=sdf[sdf.description.str.contains(rem_type+'0')]
        df0=df0.tail(1)
        df0['meas_norm']=df0['magn_mass_diff']/xrm0
        dflt=df0[df0.method_codes.str.contains('LT-LT-Z')==True]
    else:
        xrm0=df.magn_mass.tolist()[0]
        df0=sdf[sdf.description.str.contains(rem_type+'0')]
        df0=df0.tail(1)
        df0['meas_norm']=df0['magn_mass']/xrm0
        dflt=df[df.method_codes.str.contains('LT-LT-Z')==True]
    #print(df0)
    #print(dflt)
    #df0=df0.reset_index()
    #dflt=dflt.reset_index()
    #
    afdmax=df['treat_ac_field_mT'].max()
    mdf=find_mdf(df)
    #
    # plot definitions
    ax.set_title(sid+'\n '+str(rem_type)+'$_0$='+'%8.2e'%(xrm0)\
                 +' Am$^2$/kg ; MDF ~ '+str(mdf)+' mT')
    ax.set_xlabel('alternating field (mT)')
    ax.set_ylabel(str(rem_type)+'/'+str(rem_type)+'$_0$')
    ax.set_xlim(-10,200)
    ymax=df.meas_norm.max()
    if df0['meas_norm'].max() > 1.0:
        ax.set_ylim(-0.05,df0['meas_norm'].max()*1.1)
    else:
        ax.set_ylim(-0.05,ymax*1.1)
    # dotted line for each 0.5 interval for Y
    for i in range(int(ymax//0.5)+1):
        ax.axhline(0.5*i,linestyle='dotted')
    #
    # plot main data
    ax.plot(df['treat_ac_field_mT'],df['meas_norm'],'ro')
    ax.plot(df['treat_ac_field_mT'],df['meas_norm'],'r-')
    # put on the last AF step magnetization
    ax.text(df['treat_ac_field_mT'].values[-1]+.05,\
            df['meas_norm'].values[-1]+.02,'%5.3f'%(df['meas_norm'].values[-1]))
    # plot the data at af=0
    if (len(df0)>0):
        ax.plot(df0['treat_ac_field_mT'],df0['meas_norm'],'wo',markeredgecolor='black')
        ax.text(df0['treat_ac_field_mT']+.075,df0['meas_norm']+.02,\
                '%5.3f'%(df0['meas_norm']))
    if (len(dflt)>0):
        ax.plot(dflt['treat_ac_field_mT'],dflt['meas_norm'],'bo')
    #
    # normalized ratios at af=0 and afmax
    ratio_0 = 0
    if (len(df0)>0):
        ratio_0 = df0['meas_norm'].values[0]
    #
    ratio_afmax = 0
    if (len(df)>0):
        ratio_afmax = df['meas_norm'].values[-1]
    #
    return afdmax, mdf, xrm0*1e6, ratio_0, ratio_afmax

def plot_ntrm_arm(sid,ax,df,afxrm,step_min,step_max,xkey,ykey):
    #
    fac=1e6
    unit=' $\mu$Am$^2$/kg'
    #
    #fac=1e3
    #unit=' mAm$^2$/kg'
    #
    n,slope,b,r,stderr,coeffs1,coeffs2,dAIC,frac,beta,krv,krv_dash,f_resid,selected_df =\
        ltd_pars(df,afxrm,step_min,step_max,xkey,ykey)
    #
    xymax=1.1*fac*np.array([[df[ykey].max(),df[xkey].max()]]).max()
    tick=[float('{:.1e}'.format(xymax*(i+1)/4)) for i in range(4)]
    if (slope<1.5):  [xl, yl1, yl2, yl3, yl4]=[0.10, 0.90, 0.85, 0.80, 0.75]
    if (slope>=1.5): [xl, yl1, yl2, yl3, yl4]=[0.50, 0.20, 0.15, 0.10, 0.05]
    #
    linY=np.polyval(coeffs1,selected_df[xkey].values.astype('float'))
    #
    ax.set_title(sid)
    ax.set_xlabel(xkey.upper()+unit)
    ax.set_ylabel(ykey.upper()+unit)
    ax.set_xlim(0,xymax)
    ax.set_ylim(0,xymax)
    ax.set_xticks(tick)
    ax.set_yticks(tick)
    #
    ax.plot(df[xkey]*fac,df[ykey]*fac,'wo',markeredgecolor='black')
    ax.plot(selected_df[xkey]*fac,selected_df[ykey]*fac,'ko')
    ax.plot(fac*selected_df[xkey].values.astype('float'),fac*linY,'r-')
    ax.text(xl, yl1,'slope= %5.3f'%(slope)+'$\pm$%5.3f'%(stderr),\
            horizontalalignment='left', verticalalignment='center',\
            transform = ax.transAxes)
    ax.text(xl, yl2,'r= %5.3f'%(r)+', N = '+str(n),\
            horizontalalignment='left', verticalalignment='center',\
            transform = ax.transAxes)
    ax.text(xl, yl3,'k\'= %5.3f'%(krv_dash)+' (k= %5.3f'%(krv)+')',\
            horizontalalignment='left', verticalalignment='center',\
            transform = ax.transAxes)
    ax.text(xl, yl4,'('+str(int(step_min))+'-'+str(int(step_max))+' mT)',\
            horizontalalignment='left', verticalalignment='center',\
            transform = ax.transAxes)
    #
    return slope,r,n,krv,krv_dash

def plot_pint_main(sid,ax,df1,afxrm,xkey,ykey,step_min,step_max,aftrm1,aftrm2,spec_prv_df,criteria,minR,minFrac,minSlopeT,maxSlopeT,maxBeta,maxFresid,maxKrv,lab_field):
    #
    tick_div=4
    #
    fac=1e6
    unit=' $\mu$Am$^2$/kg'
    #
    #fac=1e3
    #unit=' mAm$^2$/kg'
    #
    n,slope,b,r,stderr,coeffs1,coeffs2,dAIC,frac,beta,krv,krv_dash,f_resid,selected_df =\
        ltd_pars(df1,afxrm,step_min,step_max,xkey,ykey)
    #
    xymax=1.1*fac*np.array([[df1[xkey].max(),df1[ykey].max()]]).max()
    tick=[float('{:.1e}'.format(xymax*(i+1)/tick_div)) for i in range(tick_div)]
    if (slope<1.5):  [xl, yl1, yl2, yl3, yl4, yl5, yl6]=[0.10, 0.90, 0.85, 0.80, 0.75, 0.70, 0.65]
    if (slope>=1.5): [xl, yl1, yl2, yl3, yl4, yl5, yl6]=[0.50, 0.35, 0.30, 0.25, 0.20, 0.15, 0.10]
    #
    linY=np.polyval(coeffs1,selected_df[xkey].values.astype('float'))
    #
    pint='rejected'
    if (xkey=='trm1_star') & (ykey=='nrm'):
        if (len(aftrm1)>0) & (len(aftrm2)>0):
            slope_t=float(spec_prv_df.loc[sid,'slope_TRM1-TRM2*'])
            if ('reg' in criteria) & (r>=minR) & (frac>=minFrac) \
               & (slope_t>=minSlopeT) & (slope_t<=maxSlopeT):
                pint='%7.2f'%(slope*lab_field*1e6)+' $\mu$T'
            if ('krv' in criteria) & (beta<=maxBeta) & (f_resid<=maxFresid) & (krv_dash<=maxKrv):
                pint='%7.2f'%(slope*lab_field*1e6)+' $\mu$T'
    ax.set_title(sid)
    #if (xkey=='trm1_star') & (ykey=='nrm'):
    #    ax.set_title(sid+' (B$_{anc}$=%7.2f'%(slope*lab_field*1e6)+' $\mu$T)')
    #else:
    #    ax.set_title(sid)
    ax.set_xlim(0,xymax)
    ax.set_ylim(0,xymax)
    ax.set_xticks(tick)
    ax.set_yticks(tick)
    if (xkey=='trm1_star') & (ykey=='nrm'):
ax.set_xlabel('TRM1*'+unit) ax.set_ylabel('NRM'+unit) if (xkey=='trm2_star') & (ykey=='trm1'): ax.set_xlabel('TRM2*'+unit) ax.set_ylabel('TRM1'+unit) ax.plot([0,xymax],[0,xymax],color='g',linestyle='dotted') # ax.plot(df1[xkey]*fac, df1[ykey]*fac, 'wo', markeredgecolor='black') ax.plot(selected_df[xkey]*fac, selected_df[ykey]*fac, 'ko') ax.plot(fac*selected_df[xkey].values.astype('float'),fac*linY,'r-') # ax.text(xl, yl1,'slope= %5.3f'%(slope)+'$\pm$%5.3f'%(stderr),\ horizontalalignment='left', verticalalignment='center', transform = ax.transAxes) #ax.text(xl, yl2,'r= %5.3f'%(r)+', k\'= %5.3f'%(krv_dash)+', N = '+str(n),\ ax.text(xl, yl2,'r= %5.3f'%(r)+', N= '+str(n),\ horizontalalignment='left', verticalalignment='center', transform = ax.transAxes) ax.text(xl, yl3,'FRAC= '+'%5.3f'%(frac)+', $\Delta$AIC= '+'%5.1f'%(dAIC),\ horizontalalignment='left', verticalalignment='center', transform = ax.transAxes) #ax.text(xl, yl4,'$\Delta$AIC = '+'%5.1f'%(dAIC),\ ax.text(xl, yl4,'k\'= %5.3f'%(krv_dash)+' (k= %5.3f'%(krv)+')',\ horizontalalignment='left', verticalalignment='center', transform = ax.transAxes) if (xkey=='trm1_star') & (ykey=='nrm'): ax.text(xl, yl5,'B$_{anc}$= '+pint,\ horizontalalignment='left', verticalalignment='center', transform = ax.transAxes) ax.text(xl, yl6,'('+str(int(step_min))+'-'+str(int(step_max))+' mT)',\ horizontalalignment='left', verticalalignment='center', transform = ax.transAxes) # return slope,r,n,frac,dAIC,krv,krv_dash,f_resid,pint def plot_xrm_xrm2_r2(sid,ax,df,afxrm,xkey,ykey,step_min,step_max): # fac=1e6 unit=' $\mu$Am$^2$/kg' # #fac=1e3 #unit=' mAm$^2$/kg' # n,slope,b,r,stderr,coeffs1,coeffs2,dAIC,frac,beta,krv,krv_dash,f_resid,selected_df =\ ltd_pars(df,afxrm,step_min,step_max,xkey,ykey) if 'trm1' in xkey: xymax=1.1*fac*np.array([[df['trm1'].max(),df['nrm'].max()]]).max() if 'trm2' in xkey: xymax=1.1*fac*np.array([[df['trm1'].max(),df['trm2'].max(),df['nrm'].max()]]).max() if 'arm1' in xkey: xymax=1.1*fac*np.array([[df['arm0'].max(),df['arm1'].max()]]).max() if 'arm2' in xkey: xymax=1.1*fac*np.array([[df['arm0'].max(),df['arm1'].max(),df['arm2'].max()]]).max() tick=[float('{:.1e}'.format(xymax*(i+1)/4)) for i in range(4)] if (slope<1.5): [xl, yl1, yl2, yl3, yl4]=[0.10, 0.90, 0.85, 0.80, 0.75] if (slope>=1.5): [xl, yl1, yl2, yl3, yl4]=[0.50, 0.20, 0.15, 0.10, 0.05] ax.set_title(sid) ax.set_xlabel(xkey.upper()+unit) ax.set_ylabel(ykey.upper()+unit) ax.set_xlim(0,xymax) ax.set_ylim(0,xymax) ax.set_xticks(tick) ax.set_yticks(tick) # if ykey!='nrm': ax.plot([0,xymax],[0,xymax],color='g',linestyle='dotted') ax.plot(df[xkey]*fac,df[ykey]*fac,'wo',markeredgecolor='black') ax.plot(selected_df[xkey]*fac,selected_df[ykey]*fac,'ko') # ax.text(xl, yl1,'slope= %5.3f'%(slope)+'$\pm$%5.3f'%(stderr),\ horizontalalignment='left', verticalalignment='center',\ transform = ax.transAxes) ax.text(xl, yl2,'r= %5.3f'%(r)+', N = '+str(n),\ horizontalalignment='left', verticalalignment='center',\ transform = ax.transAxes) ax.text(xl, yl3,'k\'= %5.3f'%(krv_dash)+' (k= %5.3f'%(krv)+')',\ horizontalalignment='left', verticalalignment='center',\ transform = ax.transAxes) ax.text(xl, yl4,'('+str(int(step_min))+'-'+str(int(step_max))+' mT)',\ horizontalalignment='left', verticalalignment='center',\ transform = ax.transAxes) # return slope,r,n,krv,krv_dash def plot_zijd(sid, sid_data, ax1, ax2, df, step_min, step_max): # # ax1 for equal-size, ax2 for close-up # whole=df used=df[(df.treat_ac_field_mT>=step_min)&(df.treat_ac_field_mT<=step_max)] xrm0=df.magn_mass.tolist()[0] 
df0=sid_data[sid_data.description.str.contains('NRM0')] if (len(df0.index)>0): df0['meas_norm']=df0['magn_mass']/xrm0 pre_LTD=df0 # ## PCA calculation pca_block=used[['treat_ac_field_mT','dir_dec','dir_inc','meas_norm']] pca_block['quality']='g' pca_block=pca_block[['treat_ac_field_mT','dir_dec','dir_inc','meas_norm','quality']].values.tolist() pca_result=pmag.domean(pca_block, 0, len(pca_block)-1, 'DE-BFL') #print(pca_result) pca_dec=pca_result['specimen_dec'] pca_inc=pca_result['specimen_inc'] pca_mad=pca_result['specimen_mad'] pca_n=pca_result['specimen_n'] # # title, label interval='('+str(int(step_min))+'-'+str(int(step_max))+' mT)' ax1.set_title(sid+' '+interval) ax2.set_title(sid+' '+interval) PCA='PCA: Dec= %7.1f'%(pca_dec)+', Inc= %7.1f'%(pca_inc)+', MAD= %8.2f'%(pca_mad)+', N= %2d'%(pca_n) ax1.set_xlabel(PCA) ax2.set_xlabel(PCA) # ## plot pre-LTD interval if len(list(df0.index))>0: xrm0=pre_LTD[['dir_dec','dir_inc','meas_norm']].values xyz_0=pmag.dir2cart(xrm0).transpose() ax1.plot(xyz_0[0],-xyz_0[1],color='grey',marker='o') ax1.plot(xyz_0[0],-xyz_0[2],color='grey',marker='s') ax2.plot(xyz_0[0],-xyz_0[1],color='grey',marker='o') ax2.plot(xyz_0[0],-xyz_0[2],color='grey',marker='s') # ## plot whole interval if len(list(whole.index))>0: zblock=whole[['dir_dec','dir_inc','meas_norm']] xyz_wl=pmag.dir2cart(zblock).transpose() ax1.plot(xyz_wl[0],-xyz_wl[1],color='grey',marker='o') ax1.plot(xyz_wl[0],-xyz_wl[2],color='grey',marker='s') ax2.plot(xyz_wl[0],-xyz_wl[1],color='grey',marker='o') ax2.plot(xyz_wl[0],-xyz_wl[2],color='grey',marker='s') # ## plot used interval zblock=used[['dir_dec','dir_inc','meas_norm']] XYZ=pmag.dir2cart(zblock).transpose() ax1.plot(XYZ[0],-XYZ[1],'ko') ax1.plot(XYZ[0],-XYZ[2],'ws',markeredgecolor='blue') ax1.plot(XYZ[0],-XYZ[1],'k-') ax1.plot(XYZ[0],-XYZ[2],'k-') ax2.plot(XYZ[0],-XYZ[1],'ko') ax2.plot(XYZ[0],-XYZ[2],'ws',markeredgecolor='blue') ax2.plot(XYZ[0],-XYZ[1],'k-') ax2.plot(XYZ[0],-XYZ[2],'k-') # # put on best fit line Rstart=np.sqrt((XYZ[0][0])**2+(XYZ[1][0])**2+(XYZ[2][0])**2) Rstop=np.sqrt((XYZ[0][-1])**2+(XYZ[1][-1])**2+(XYZ[2][-1])**2) XYZ_start=pmag.dir2cart([pca_dec,pca_inc,Rstart]) XYZ_stop=-1*pmag.dir2cart([pca_dec,pca_inc,Rstop]) ax1.plot([XYZ_start[0],XYZ_stop[0]],[-XYZ_start[1],-XYZ_stop[1]],'r-') ax1.plot([XYZ_start[0],XYZ_stop[0]],[-XYZ_start[2],-XYZ_stop[2]],'r-') ax2.plot([XYZ_start[0],XYZ_stop[0]],[-XYZ_start[1],-XYZ_stop[1]],'r-') ax2.plot([XYZ_start[0],XYZ_stop[0]],[-XYZ_start[2],-XYZ_stop[2]],'r-') # # get max and min [xmax,xmin,ymax,ymin]=[0,0,0,0] if len(list(df0.index))>0: xmax=np.max([xyz_0[0].max(),xyz_wl[0].max()]) xmin=np.min([xyz_0[0].min(),xyz_wl[0].min()]) ymax=np.max([(-xyz_0[1]).max(),(-xyz_0[2]).max(),\ (-xyz_wl[1]).max(),(-xyz_wl[2]).max()]) ymin=np.min([(-xyz_0[1]).min(),(-xyz_0[2]).min(),\ (-xyz_wl[1]).min(),(-xyz_wl[2]).min()]) else: xmax=np.max([xyz_wl[0].max()]) xmin=np.min([xyz_wl[0].min()]) ymax=np.max([(-xyz_wl[1]).max(),(-xyz_wl[2]).max()]) ymin=np.min([(-xyz_wl[1]).min(),(-xyz_wl[2]).min()]) #print(xmin, xmax) #print(ymin, ymax) [xlength, ylength]=[xmax-xmin, ymax-ymin] xylength=max(xlength, ylength) # # plot size adjustment for ax1 div=2 tick1=[float('{:.1e}'.format(-xylength*(i+1)/div)) for i in range(div)] tick2=[0.0] tick3=[float('{:.1e}'.format(xylength*(i+1)/div)) for i in range(div)] tick=tick1 + tick2 + tick3 ax1.plot([-xylength*1.1, xylength*1.1],[0,0],'k-') ax1.set_xlim(-xylength*1.1, xylength*1.1) ax1.plot([0,0], [-xylength*1.1, xylength*1.1],'k-') ax1.set_ylim(-xylength*1.1, xylength*1.1) 
ax1.set_xticks(tick) ax1.set_yticks(tick) # # plot size adjustment for ax2 if xmin>0: ax2.plot([-xlength*0.1, xmax+xlength*0.1],[0,0],'k-') ax2.set_xlim(-xlength*0.1, xmax+xlength*0.1) if xmin<0: if xmax<0: ax2.plot([xmin-xlength*0.1, xlength*0.1],[0,0],'k-') ax2.set_xlim(xmin-xlength*0.1, xlength*0.1) if xmax>0: ax2.plot([xmin-xlength*0.1, xmax+xlength*0.1],[0,0],'k-') ax2.set_xlim(xmin-xlength*0.1, xmax+xlength*0.1) if ymin>0: ax2.plot([0,0], [-ylength*0.1, ymax+ylength*0.1],'k-') ax2.set_ylim(-ylength*0.1, ymax+ylength*0.1) if ymin<0: if ymax<0: ax2.plot([0,0], [ymin-ylength*0.1, ylength*0.1],'k-') ax2.set_ylim(ymin-ylength*0.1, ylength*0.1) if ymax>0: ax2.plot([0,0], [ymin-ylength*0.1, ymax+ylength*0.1],'k-') ax2.set_ylim(ymin-ylength*0.1, ymax+ylength*0.1) # return pca_dec, pca_inc, pca_mad, pca_n def prep_sid_df(xrm_types, df): # subtract the last treatment step from the others for all types # set afxrm data for all types (afnrm, afarm0, ...) for t in xrm_types: if t=='NRM': afnrm, sd_diff_n =set_NTRM_data(df,t) if t=='TRM1': aftrm1,sd_diff_t1=set_NTRM_data(df,t) if t=='TRM2': aftrm2,sd_diff_t2=set_NTRM_data(df,t) if t=='ARM0': afarm0,sd_diff_a0=set_ARM_data(df,t) if t=='ARM1': afarm1,sd_diff_a1=set_ARM_data(df,t) if t=='ARM2': afarm2,sd_diff_a2=set_ARM_data(df,t) print ('NRM: ',len(afnrm),' data, TRM1:',len(aftrm1),' data, TRM2:',len(aftrm2),' data') print ('ARM0: ',len(afarm0),' data, ARM1:',len(afarm1),' data, ARM2:',len(afarm2),' data') # set data for bi-plot: merged by the treatment steps against each other if (len(afnrm)>0): sid0_df=afnrm[['treat_ac_field_mT','magn_mass_diff']] sid0_df.columns=['treat','nrm'] sid_df=sid0_df sid_data_diff=sd_diff_n if (len(afarm0)>0): sid0_df=afarm0[['treat_ac_field_mT','magn_mass_diff']] sid0_df.columns=['treat','arm0'] sid_df=sid_df[['treat','nrm']].merge(\ sid0_df[['treat','arm0']], on='treat') sid_data_diff=pd.concat([sid_data_diff,sd_diff_a0]) if (len(aftrm1)>0): sid0_df=aftrm1[['treat_ac_field_mT','magn_mass_diff']] sid0_df.columns=['treat','trm1'] sid_df=sid_df[['treat','nrm','arm0']].merge(\ sid0_df[['treat','trm1']], on='treat') sid_data_diff=pd.concat([sid_data_diff,sd_diff_t1]) if (len(afarm1)>0): sid0_df=afarm1[['treat_ac_field_mT','magn_mass_diff']] sid0_df.columns=['treat','arm1'] sid_df=sid_df[['treat','nrm','arm0','trm1']].merge(\ sid0_df[['treat','arm1']], on='treat') sid_data_diff=pd.concat([sid_data_diff,sd_diff_a1]) if (len(aftrm2)>0): sid0_df=aftrm2[['treat_ac_field_mT','magn_mass_diff']] sid0_df.columns=['treat','trm2'] sid_df=sid_df[['treat','nrm','arm0','trm1','arm1']].merge(\ sid0_df[['treat','trm2']], on='treat') sid_data_diff=pd.concat([sid_data_diff,sd_diff_t2]) if (len(afarm2)>0): sid0_df=afarm2[['treat_ac_field_mT','magn_mass_diff']] sid0_df.columns=['treat','arm2'] sid_df=sid_df[['treat','nrm','arm0','trm1','arm1','trm2']].merge(\ sid0_df[['treat','arm2']], on='treat') sid_data_diff=pd.concat([sid_data_diff,sd_diff_a2]) last_treat=sid_df.treat.max() # need to peel off the last step for division step sid_df=sid_df[sid_df.treat<last_treat] # calculate TRM1* and TRM2* if (len(aftrm1)>0) & (len(afarm0)>0) & (len(afarm1)>0): sid_df['trm1_star']=sid_df['trm1']*(sid_df['arm0']/sid_df['arm1']) if (len(aftrm2)>0) & (len(afarm1)>0) & (len(afarm2)>0): sid_df['trm2_star']=sid_df['trm2']*(sid_df['arm1']/sid_df['arm2']) # put the last treatment step back in (as zero) last_df=pd.DataFrame([np.zeros(len(list(sid_df.columns)))]) last_df.columns=sid_df.columns last_df['treat']=last_treat new_df=pd.concat((sid_df,last_df)) 
    new_df.reset_index(inplace=True,drop=True)
    sid_df=new_df
    #
    return sid_data_diff,sid_df,afnrm,aftrm1,aftrm2,afarm0,afarm1,afarm2

def set_ARM_data(df,rem_type):
    """
    choose and calculate ARM data (except pre-LTD 0 data) from the input data

    Parameters
    _________
    df : dataframe of measurement data
    rem_type : remanence type

    Returns
    ________
    afxrm : XRM data with "meas_norm" column
    df3 : base-vector-subtracted data
    """
    XRM0 = str(rem_type) + '0'
    df2=subtract_base_vector(df,rem_type)
    df3=df2[df2.description.str.contains(rem_type)]
    afxrm=df3
    if (len(afxrm)>0):
        meas0=afxrm.magn_mass_diff.tolist()[0]
        afxrm['meas_norm']=afxrm['magn_mass_diff']/meas0
        afxrm=afxrm.loc[afxrm.method_codes.str.contains('LT-LT-Z')==False]
        # re-select, dropping the pre-LTD XRM0 rows, then renormalize
        afxrm=df2[df2.description.str.contains(rem_type)]
        afxrm=afxrm[afxrm.description.str.contains(XRM0)==False]
        meas0=afxrm.magn_mass_diff.tolist()[0]
        afxrm['meas_norm']=afxrm['magn_mass_diff']/meas0
    return afxrm,df3

def set_NTRM_data(df,rem_type):
    """
    choose and calculate NTRM data from the input data

    Parameters
    _________
    df : dataframe of measurement data
    rem_type : remanence type

    Returns
    ________
    afxrm : XRM data with "meas_norm" column
    df5 : base-vector-subtracted data (including the pre-LTD XRM0 rows)
    """
    XRM0 = str(rem_type) + '0'
    df2=subtract_base_vector(df,rem_type)
    df3=df2[df2.description==rem_type]
    df4=df2[df2.description.str.contains(XRM0)==True]
    df5=pd.concat([df3,df4])
    #df5.to_csv('_temp.csv',index=True)
    afxrm=df3
    if (len(afxrm)>0):
        afxrm=afxrm[afxrm.description.str.contains(XRM0)==False]
        meas0=afxrm.magn_mass.tolist()[0]  # get first measurement (after LTD)
        afxrm['meas_norm']=afxrm['magn_mass']/meas0  # normalized by first measurement
    return afxrm,df5

def vds(xyz):
    """Vector difference sum of a 3xN array of Cartesian components."""
    R=0
    cart=xyz.transpose()
    for i in range(xyz.shape[1]-1):
        diff=[cart[i][0]-cart[i+1][0],cart[i][1]-cart[i+1][1],cart[i][2]-cart[i+1][2]]
        dirdiff=pmag.cart2dir(diff)
        R+=dirdiff[2]
    return R

def wrapper_ltd_pars_mod(args):
    return ltd_pars_mod(*args)

def wrapper_zijd_PCA_calc(args):
    return zijd_PCA_calc(*args)

def zijd_PCA_calc(df,start,end):
    # PCA (best-fit line) statistics for one Zijderveld interval
    used=df[(df.treat_ac_field_mT>=start)&(df.treat_ac_field_mT<=end)]
    pca_block=used[['treat_ac_field_mT','dir_dec','dir_inc','meas_norm']]
    pca_block['quality']='g'
    pca_block=pca_block[['treat_ac_field_mT','dir_dec','dir_inc','meas_norm','quality']].values.tolist()
    pca_result=pmag.domean(pca_block, 0, len(pca_block)-1, 'DE-BFL')
    mad=pca_result['specimen_mad']
    dang=pca_result['specimen_dang']
    spec_n=pca_result['specimen_n']
    step_min=pca_result['measurement_step_min']
    step_max=pca_result['measurement_step_max']
    #print('%5.1f'%(step_min) + ' - %5.1f'%(step_max) + ' mT : MAD= %5.2f'%(mad) \
    #      + ', DANG= %5.2f'%(dang) + ', N= %2d'%(spec_n))
    #
    return step_min,step_max,mad,dang,spec_n
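
# ----------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): drives the
# k'-criteria screening defined above with a tiny invented statistics
# table. The threshold values follow the Lloyd+2021 defaults quoted in the
# find_best_API_portion_k docstring; the toy numbers are made up purely
# for demonstration, and the column names are the ones that function
# consumes.
if __name__ == '__main__':
    import pandas as pd  # normally imported at the top of the module
    toy_stats = pd.DataFrame({
        'step_min_n': [5.0, 10.0],
        'step_min_t': [5.0, 10.0],
        'step_max':   [60.0, 80.0],
        'frac_n':     [0.85, 0.72],
        'beta_n':     [0.05, 0.20],
        'f_resid_n':  [0.04, 0.15],
        'krvd_n':     [0.10, 0.35],
    })
    # Only the first row passes all three thresholds, so its interval wins.
    best = find_best_API_portion_k(toy_stats, maxBeta=0.1, maxFresid=0.1, maxKrv=0.2)
    print('selected NRM-TRM1* interval:', best[0], '-', best[1], 'mT')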
0
0
907bbce329dfe85ec9ec7bb56a5142c19f23dc3a
2,148
py
Python
backend/app/app/models/user.py
PY-GZKY/Tplan
9f5335f9a9a28afce608744bebed1d9827068e6d
[ "MIT" ]
121
2021-10-29T20:21:37.000Z
2022-03-21T03:33:52.000Z
backend/app/app/models/user.py
GZKY-PY/Tplan
425ca8a497cdb3438bdbf6c72ed8dc234479dd00
[ "MIT" ]
null
null
null
backend/app/app/models/user.py
GZKY-PY/Tplan
425ca8a497cdb3438bdbf6c72ed8dc234479dd00
[ "MIT" ]
8
2021-11-06T07:02:11.000Z
2022-02-28T11:53:23.000Z
import datetime

from sqlalchemy import Boolean, Column, Integer, String, ForeignKey, Date
from sqlalchemy.orm import relationship

from app.db.base_class import Base


class User(Base):
    """User table"""
    id = Column(Integer, primary_key=True, index=True)
    username = Column(String(32), unique=True, index=True, nullable=False, doc="code")
    nickname = Column(String(32), doc="name")
    sex = Column(String(8), doc="sex")
    identity_card = Column(String(32), doc="ID card number")
    phone = Column(String(32), doc="phone number")
    address = Column(String(32), doc="address")
    work_start = Column(Date, doc="hire date", default=datetime.datetime.today())
    hashed_password = Column(String(128), nullable=False, doc="password")
    avatar = Column(String(128), doc="avatar",
        default="https://wpimg.wallstcn.com/f778738c-e4f8-4870-b634-56703b4acafe.gif?imageView2/1/w/80/h/80")
    introduction = Column(String(256), doc="self-introduction")
    status = Column(String(32), nullable=False, doc="status")
    is_active = Column(Boolean(), default=True, doc="is active")
    is_superuser = Column(Boolean(), default=False, doc="is superuser")
    user_role = relationship("UserRole", backref="user")
    user_department = relationship("UserDepartment", backref="user")
    user_dict = relationship("UserDict", backref="user")


class UserRole(Base):
    """User-role junction table"""
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("user.id", ondelete='CASCADE'))
    role_id = Column(Integer, ForeignKey("role.id"))
    role = relationship("Role")


class UserDepartment(Base):
    """User-department junction table"""
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("user.id", ondelete='CASCADE'))
    department_id = Column(Integer, ForeignKey("department.id"))
    department = relationship("Department")


class UserDict(Base):
    """User-dictionary junction table"""
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("user.id", ondelete='CASCADE'))
    dict_id = Column(Integer, ForeignKey("dict_data.id", ondelete='CASCADE'))
    dict_data = relationship("DictData", backref="user_dict")
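
# A minimal usage sketch (illustrative only; ``SessionLocal`` is a
# hypothetical session factory and the ``name`` attribute on Role is an
# assumption; neither is defined in this module):
#
#   from app.db.session import SessionLocal  # assumed location
#
#   db = SessionLocal()
#   user = User(username="u0001", nickname="Alice",
#               hashed_password="<hash>", status="active")
#   db.add(user)
#   db.commit()
#   # The junction tables are reachable through the relationships:
#   role_names = [ur.role.name for ur in user.user_role]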
37.684211
121
0.684358
import datetime

from sqlalchemy import Boolean, Column, Integer, String, ForeignKey, Date
from sqlalchemy.orm import relationship

from app.db.base_class import Base


class User(Base):
    """User table"""
    id = Column(Integer, primary_key=True, index=True)
    username = Column(String(32), unique=True, index=True, nullable=False, doc="code")
    nickname = Column(String(32), doc="name")
    sex = Column(String(8), doc="sex")
    identity_card = Column(String(32), doc="ID card number")
    phone = Column(String(32), doc="phone number")
    address = Column(String(32), doc="address")
    work_start = Column(Date, doc="hire date", default=datetime.datetime.today())
    hashed_password = Column(String(128), nullable=False, doc="password")
    avatar = Column(String(128), doc="avatar",
        default="https://wpimg.wallstcn.com/f778738c-e4f8-4870-b634-56703b4acafe.gif?imageView2/1/w/80/h/80")
    introduction = Column(String(256), doc="self-introduction")
    status = Column(String(32), nullable=False, doc="status")
    is_active = Column(Boolean(), default=True, doc="is active")
    is_superuser = Column(Boolean(), default=False, doc="is superuser")
    user_role = relationship("UserRole", backref="user")
    user_department = relationship("UserDepartment", backref="user")
    user_dict = relationship("UserDict", backref="user")


class UserRole(Base):
    """User-role junction table"""
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("user.id", ondelete='CASCADE'))
    role_id = Column(Integer, ForeignKey("role.id"))
    role = relationship("Role")


class UserDepartment(Base):
    """User-department junction table"""
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("user.id", ondelete='CASCADE'))
    department_id = Column(Integer, ForeignKey("department.id"))
    department = relationship("Department")


class UserDict(Base):
    """User-dictionary junction table"""
    id = Column(Integer, primary_key=True, index=True)
    user_id = Column(Integer, ForeignKey("user.id", ondelete='CASCADE'))
    dict_id = Column(Integer, ForeignKey("dict_data.id", ondelete='CASCADE'))
    dict_data = relationship("DictData", backref="user_dict")
192
0
5911f7aec8f081ff83e457af5fdd4cc279732509
32
py
Python
utils/models/highresnetv2/__init__.py
bhklab/ptl-oar-segmentation
354c3ee7f042a025f74e210a7b8462beac9b727d
[ "Apache-2.0" ]
3
2022-01-18T19:25:46.000Z
2022-02-05T18:53:24.000Z
utils/models/highresnetv2/__init__.py
bhklab/ptl-oar-segmentation
354c3ee7f042a025f74e210a7b8462beac9b727d
[ "Apache-2.0" ]
null
null
null
utils/models/highresnetv2/__init__.py
bhklab/ptl-oar-segmentation
354c3ee7f042a025f74e210a7b8462beac9b727d
[ "Apache-2.0" ]
null
null
null
from .model import HighResNet3D
16
31
0.84375
from .model import HighResNet3D
0
0
99102d586405e98b68ea689e5d8d0984d8adee8b
3,129
py
Python
hathor/p2p/messages.py
mbnunes/hathor-core
e5e0d4a627341e2a37ee46db5c9354ddb7f8dfb8
[ "Apache-2.0" ]
51
2019-12-28T03:33:27.000Z
2022-03-10T14:03:03.000Z
hathor/p2p/messages.py
mbnunes/hathor-core
e5e0d4a627341e2a37ee46db5c9354ddb7f8dfb8
[ "Apache-2.0" ]
316
2019-09-10T09:20:05.000Z
2022-03-31T20:18:56.000Z
hathor/p2p/messages.py
mbnunes/hathor-core
e5e0d4a627341e2a37ee46db5c9354ddb7f8dfb8
[ "Apache-2.0" ]
19
2020-01-04T00:13:18.000Z
2022-02-08T21:18:46.000Z
# Copyright 2021 Hathor Labs # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from enum import Enum from typing import List, NamedTuple class GetNextPayload(NamedTuple): timestamp: int offset: int = 0 class NextPayload(NamedTuple): timestamp: int next_timestamp: int next_offset: int hashes: List[bytes] class GetTipsPayload(NamedTuple): timestamp: int include_hashes: bool offset: int = 0 class TipsPayload(NamedTuple): length: int timestamp: int merkle_tree: bytes hashes: List[str] has_more: bool class ProtocolMessages(Enum): # --- # General Error Messages # --- # Notifies an error. ERROR = 'ERROR' # Notifies a throttle. THROTTLE = 'THROTTLE' # --- # Peer-to-peer Control Messages # --- # Identifies the app and network the peer would like to connect to. HELLO = 'HELLO' # Identifies the peer. PEER_ID = 'PEER-ID' # Tell the other peer your peer-id validations were completed and you are ready READY = 'READY' # Request a list of peers. GET_PEERS = 'GET-PEERS' # Usually it is a response to a GET-PEERS command. But it can be sent # without request when a new peer connects. PEERS = 'PEERS' # Ping is used to prevent an idle connection. PING = 'PING' # Pong is a response to a PING command. PONG = 'PONG' # --- # Hathor Specific Messages # --- GET_DATA = 'GET-DATA' # Request the data for a specific transaction. DATA = 'DATA' # Send the data for a specific transaction. NOT_FOUND = 'NOT-FOUND' # Used when a requested tx from GET-DATA is not found in the peer GET_TIPS = 'GET-TIPS' TIPS = 'TIPS' TIPS_END = 'TIPS-END' RELAY = 'RELAY' GET_NEXT = 'GET-NEXT' NEXT = 'NEXT' # Sync-v2 messages GET_NEXT_BLOCKS = 'GET-NEXT-BLOCKS' GET_PREV_BLOCKS = 'GET-PREV-BLOCKS' BLOCKS = 'BLOCKS' BLOCKS_END = 'BLOCKS-END' GET_BEST_BLOCK = 'GET-BEST-BLOCK' # Request the best block of the peer BEST_BLOCK = 'BEST-BLOCK' # Send the best block to your peer GET_BLOCK_TXS = 'GET-BLOCK-TXS' # TODO: rename, maybe GET-TX-RANGE or repurpose GET-TRANSACTIONS above TRANSACTION = 'TRANSACTION' GET_MEMPOOL = 'GET-MEMPOOL' # TODO: rename, maybe GET-TX-RANGE or repurpose GET-TRANSACTIONS above MEMPOOL_END = 'MEMPOOL-END' # End of mempool sync GET_COMMON_CHAIN = 'GET-COMMON-CHAIN' COMMON_CHAIN = 'COMMON-CHAIN' GET_PEER_BLOCK_HASHES = 'GET-PEER-BLOCK-HASHES' PEER_BLOCK_HASHES = 'PEER-BLOCK-HASHES' STOP_BLOCK_STREAMING = 'STOP-BLOCK-STREAMING'
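
# A minimal sketch of how these enum members can frame one line of the
# wire protocol (an illustrative addition; the real framing lives in the
# protocol classes elsewhere in hathor-core, not in this module):
def _render_message(cmd: ProtocolMessages, payload: str = '') -> bytes:
    # e.g. _render_message(ProtocolMessages.PING) == b'PING\r\n'
    line = cmd.value + ((' ' + payload) if payload else '')
    return line.encode('utf-8') + b'\r\n'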
26.294118
107
0.676894
# Copyright 2021 Hathor Labs # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from enum import Enum from typing import List, NamedTuple class GetNextPayload(NamedTuple): timestamp: int offset: int = 0 class NextPayload(NamedTuple): timestamp: int next_timestamp: int next_offset: int hashes: List[bytes] class GetTipsPayload(NamedTuple): timestamp: int include_hashes: bool offset: int = 0 class TipsPayload(NamedTuple): length: int timestamp: int merkle_tree: bytes hashes: List[str] has_more: bool class ProtocolMessages(Enum): # --- # General Error Messages # --- # Notifies an error. ERROR = 'ERROR' # Notifies a throttle. THROTTLE = 'THROTTLE' # --- # Peer-to-peer Control Messages # --- # Identifies the app and network the peer would like to connect to. HELLO = 'HELLO' # Identifies the peer. PEER_ID = 'PEER-ID' # Tell the other peer your peer-id validations were completed and you are ready READY = 'READY' # Request a list of peers. GET_PEERS = 'GET-PEERS' # Usually it is a response to a GET-PEERS command. But it can be sent # without request when a new peer connects. PEERS = 'PEERS' # Ping is used to prevent an idle connection. PING = 'PING' # Pong is a response to a PING command. PONG = 'PONG' # --- # Hathor Specific Messages # --- GET_DATA = 'GET-DATA' # Request the data for a specific transaction. DATA = 'DATA' # Send the data for a specific transaction. NOT_FOUND = 'NOT-FOUND' # Used when a requested tx from GET-DATA is not found in the peer GET_TIPS = 'GET-TIPS' TIPS = 'TIPS' TIPS_END = 'TIPS-END' RELAY = 'RELAY' GET_NEXT = 'GET-NEXT' NEXT = 'NEXT' # Sync-v2 messages GET_NEXT_BLOCKS = 'GET-NEXT-BLOCKS' GET_PREV_BLOCKS = 'GET-PREV-BLOCKS' BLOCKS = 'BLOCKS' BLOCKS_END = 'BLOCKS-END' GET_BEST_BLOCK = 'GET-BEST-BLOCK' # Request the best block of the peer BEST_BLOCK = 'BEST-BLOCK' # Send the best block to your peer GET_BLOCK_TXS = 'GET-BLOCK-TXS' # TODO: rename, maybe GET-TX-RANGE or repurpose GET-TRANSACTIONS above TRANSACTION = 'TRANSACTION' GET_MEMPOOL = 'GET-MEMPOOL' # TODO: rename, maybe GET-TX-RANGE or repurpose GET-TRANSACTIONS above MEMPOOL_END = 'MEMPOOL-END' # End of mempool sync GET_COMMON_CHAIN = 'GET-COMMON-CHAIN' COMMON_CHAIN = 'COMMON-CHAIN' GET_PEER_BLOCK_HASHES = 'GET-PEER-BLOCK-HASHES' PEER_BLOCK_HASHES = 'PEER-BLOCK-HASHES' STOP_BLOCK_STREAMING = 'STOP-BLOCK-STREAMING'
0
0
256b9c2991d095bc7b5875dc44f643f8c72cc6ef
1,980
py
Python
libya_elections/settings/dev.py
SmartElect/SmartElect
d6d35f2fa8f60e756ad5247f8f0a5f05830e92f8
[ "Apache-2.0" ]
23
2015-10-28T14:08:23.000Z
2021-09-11T21:38:41.000Z
libya_elections/settings/dev.py
SmartElect/SmartElect
d6d35f2fa8f60e756ad5247f8f0a5f05830e92f8
[ "Apache-2.0" ]
4
2019-12-05T20:36:10.000Z
2020-06-05T18:41:54.000Z
libya_elections/settings/dev.py
SmartElect/SmartElect
d6d35f2fa8f60e756ad5247f8f0a5f05830e92f8
[ "Apache-2.0" ]
11
2015-10-28T15:49:56.000Z
2021-09-14T14:18:36.000Z
import sys from libya_elections.settings.base import * # noqa DEBUG = True SECRET_KEY = 'dummy secret key for testing only' INTERNAL_IPS = ('127.0.0.1', ) EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' CELERY_TASK_ALWAYS_EAGER = False INSTALLED_BACKENDS = { HTTPTESTER_BACKEND: { "ENGINE": "rapidsms.backends.database.DatabaseBackend", }, "vumi-fake-smsc": { "ENGINE": "rapidsms.backends.vumi.VumiBackend", # Default to localhost, but allow override "sendsms_url": os.getenv("vumi_fake_smsc_sendsms_url", "http://127.0.0.1:9000/send/"), }, "vumi-http": { "ENGINE": "rapidsms.backends.vumi.VumiBackend", # Default to localhost, but allow override "sendsms_url": os.getenv("VUMI_HTTP_SENDSMS_URL", "http://127.0.0.1:9000/send/"), }, } CACHES = { 'default': { # Use same backend as in production 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', # Assume memcached is local 'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 60 * 60, # one hour } } # Special test settings if 'test' in sys.argv: CELERY_TASK_ALWAYS_EAGER = True CELERY_TASK_EAGER_PROPAGATES = True PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', ) CAPTCHA_TEST_MODE = True REPORTING_REDIS_KEY_PREFIX = 'os_reporting_api_ut_' # use default storage for tests, since we don't run collectstatic for tests STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' else: # Enable all tools for local development, but not when running tests. ENABLE_ALL_TOOLS = True # Enable django-debug-toolbar if not running tests INSTALLED_APPS[-1:-1] = ( "debug_toolbar", ) DEBUG_TOOLBAR_PATCH_SETTINGS = False MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', )
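
# A quick way to confirm which branch the 'test' switch above took
# (illustrative only; assumes the settings package imports cleanly from a
# configured shell):
#
#   python -c "import sys; sys.argv.append('test'); \
#              from libya_elections.settings import dev; \
#              print(dev.CELERY_TASK_ALWAYS_EAGER)"  # expected: True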
29.117647
94
0.676768
import sys from libya_elections.settings.base import * # noqa DEBUG = True SECRET_KEY = 'dummy secret key for testing only' INTERNAL_IPS = ('127.0.0.1', ) EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' CELERY_TASK_ALWAYS_EAGER = False INSTALLED_BACKENDS = { HTTPTESTER_BACKEND: { "ENGINE": "rapidsms.backends.database.DatabaseBackend", }, "vumi-fake-smsc": { "ENGINE": "rapidsms.backends.vumi.VumiBackend", # Default to localhost, but allow override "sendsms_url": os.getenv("vumi_fake_smsc_sendsms_url", "http://127.0.0.1:9000/send/"), }, "vumi-http": { "ENGINE": "rapidsms.backends.vumi.VumiBackend", # Default to localhost, but allow override "sendsms_url": os.getenv("VUMI_HTTP_SENDSMS_URL", "http://127.0.0.1:9000/send/"), }, } CACHES = { 'default': { # Use same backend as in production 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache', # Assume memcached is local 'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 60 * 60, # one hour } } # Special test settings if 'test' in sys.argv: CELERY_TASK_ALWAYS_EAGER = True CELERY_TASK_EAGER_PROPAGATES = True PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', ) CAPTCHA_TEST_MODE = True REPORTING_REDIS_KEY_PREFIX = 'os_reporting_api_ut_' # use default storage for tests, since we don't run collectstatic for tests STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' else: # Enable all tools for local development, but not when running tests. ENABLE_ALL_TOOLS = True # Enable django-debug-toolbar if not running tests INSTALLED_APPS[-1:-1] = ( "debug_toolbar", ) DEBUG_TOOLBAR_PATCH_SETTINGS = False MIDDLEWARE += ( 'debug_toolbar.middleware.DebugToolbarMiddleware', )
0
0
d9da596db75589f3cb736b7eaf7f3d9f34f2592d
13,278
py
Python
pontoon/administration/tests/test_views.py
rhencke/pontoon
d530830acd4e03f3e29cae3273a5fede9f246499
[ "BSD-3-Clause" ]
1
2018-12-24T11:15:35.000Z
2018-12-24T11:15:35.000Z
pontoon/administration/tests/test_views.py
rhencke/pontoon
d530830acd4e03f3e29cae3273a5fede9f246499
[ "BSD-3-Clause" ]
1
2018-08-03T12:02:41.000Z
2018-08-03T12:02:41.000Z
pontoon/administration/tests/test_views.py
shkamaru/pontoon
6a1ec623d1518867e6b9b1d2059da00716a03ac0
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*-
import pytest

from django.core.urlresolvers import reverse

from pontoon.administration.forms import (
    ProjectForm,
)
from pontoon.administration.views import _create_or_update_translated_resources
from pontoon.base.models import (
    Entity,
    Locale,
    Project,
    ProjectLocale,
    Resource,
    TranslatedResource,
)
from pontoon.test.factories import (
    EntityFactory,
    LocaleFactory,
    ProjectFactory,
    ResourceFactory,
    TranslationFactory,
    UserFactory,
)


@pytest.mark.django_db
def test_manage_project_strings(client):
    project = ProjectFactory.create(data_source='database', repositories=[])
    url = reverse('pontoon.admin.project.strings', args=(project.slug,))

    # Test with anonymous user.
    response = client.get(url)
    assert response.status_code == 403

    # Test with a user that is not a superuser.
    user = UserFactory.create()
    client.force_login(user)
    response = client.get(url)
    assert response.status_code == 403

    # Test with a superuser.
    user.is_superuser = True
    user.save()
    response = client.get(url)
    assert response.status_code == 200


@pytest.mark.django_db
def test_manage_project(client_superuser):
    url = reverse('pontoon.admin.project.new')
    response = client_superuser.get(url)
    assert response.status_code == 200


@pytest.mark.django_db
def test_manage_project_strings_bad_request(client_superuser):
    # Test that an unknown project returns a 404 error.
    url = reverse('pontoon.admin.project.strings', args=('unknown',))
    response = client_superuser.get(url)
    assert response.status_code == 404


@pytest.mark.django_db
def test_manage_project_strings_new(client_superuser, locale_a):
    project = ProjectFactory.create(
        data_source='database',
        repositories=[],
        locales=[locale_a],
    )
    url = reverse('pontoon.admin.project.strings', args=(project.slug,))

    # Test sending a well-formatted batch of strings.
    new_strings = """Hey, I just met you
        And this is crazy
        But here's my number
        So call me maybe?
    """
    response = client_superuser.post(url, {'new_strings': new_strings})
    assert response.status_code == 200

    # Verify a resource has been created.
    resources = list(Resource.objects.filter(project=project))
    assert len(resources) == 1
    assert resources[0].path == 'database'

    # Verify all strings have been created as entities.
    entities = Entity.for_project_locale(project, locale_a)
    assert len(entities) == 4

    expected_strings = [
        'Hey, I just met you',
        'And this is crazy',
        'But here\'s my number',
        'So call me maybe?',
    ]

    assert expected_strings == [x.string for x in entities]

    # Verify strings have the correct order.
    for index, entity in enumerate(entities):
        assert entity.order == index

    # Verify new strings appear on the page.
    assert 'Hey, I just met you' in response.content


@pytest.mark.django_db
def test_manage_project_strings_translated_resource(client_superuser):
    """Test that adding new strings to a project enables translation of that
    project on all enabled locales.
    """
    locales = [
        LocaleFactory.create(code='kl', name='Klingon'),
        LocaleFactory.create(code='gs', name='Geonosian'),
    ]
    project = ProjectFactory.create(
        data_source='database',
        locales=locales,
        repositories=[]
    )
    locales_count = len(locales)
    _create_or_update_translated_resources(project, locales)

    url = reverse('pontoon.admin.project.strings', args=(project.slug,))

    new_strings = """
        Morty, do you know what "Wubba lubba dub dub" means?
        Oh that's just Rick's stupid non-sense catch phrase.
        It's not.
        In my people's tongue, it means "I am in great pain, please help me".
""" strings_count = 4 response = client_superuser.post(url, {'new_strings': new_strings}) assert response.status_code == 200 # Verify no strings have been created as entities. entities = list(Entity.objects.filter(resource__project=project)) assert len(entities) == strings_count # Verify the resource has the right stats. resources = Resource.objects.filter(project=project) assert len(resources) == 1 resource = resources[0] assert resource.total_strings == strings_count # Verify the correct TranslatedResource objects have been created. translated_resources = TranslatedResource.objects.filter(resource__project=project) assert len(translated_resources) == locales_count # Verify stats have been correctly updated on locale, project and resource. for tr in translated_resources: assert tr.total_strings == strings_count project = Project.objects.get(id=project.id) assert project.total_strings == strings_count * locales_count for l in locales: locale = Locale.objects.get(id=l.id) assert locale.total_strings == strings_count @pytest.mark.django_db def test_manage_project_strings_new_all_empty(client_superuser): """Test that sending empty data doesn't create empty strings in the database. """ project = ProjectFactory.create(data_source='database', repositories=[]) url = reverse('pontoon.admin.project.strings', args=(project.slug,)) # Test sending an empty batch of strings. new_strings = " \n \n\n" response = client_superuser.post(url, {'new_strings': new_strings}) assert response.status_code == 200 # Verify no strings have been created as entities. entities = list(Entity.objects.filter(resource__project=project)) assert len(entities) == 0 @pytest.mark.django_db def test_manage_project_strings_list(client_superuser): project = ProjectFactory.create(data_source='database', repositories=[]) resource = ResourceFactory.create(project=project) nb_entities = 2 entities = EntityFactory.create_batch(nb_entities, resource=resource) url = reverse('pontoon.admin.project.strings', args=(project.slug,)) response = client_superuser.get(url) assert response.status_code == 200 for i in range(nb_entities): assert 'string %s' % i in response.content # Test editing strings and comments. form_data = { 'form-TOTAL_FORMS': nb_entities, 'form-INITIAL_FORMS': nb_entities, 'form-MIN_NUM_FORMS': 0, 'form-MAX_NUM_FORMS': 1000, 'form-0-id': entities[0].id, 'form-0-string': 'changed 0', 'form-0-comment': 'Wubba lubba dub dub', 'form-1-id': entities[1].id, 'form-1-string': 'string 1', 'form-1-obsolete': 'on', # Remove this one. } response = client_superuser.post(url, form_data) assert response.status_code == 200 assert 'changed 0' in response.content assert 'Wubba lubba dub dub' in response.content assert 'string 0' not in response.content assert 'string 1' not in response.content # It's been removed. total = Entity.objects.filter( resource=resource, obsolete=False, ).count() assert total == nb_entities - 1 # Test adding a new string. 
    form_data = {
        'form-TOTAL_FORMS': nb_entities,
        'form-INITIAL_FORMS': nb_entities - 1,
        'form-MIN_NUM_FORMS': 0,
        'form-MAX_NUM_FORMS': 1000,
        'form-0-id': entities[0].id,
        'form-0-string': 'changed 0',
        'form-0-comment': 'Wubba lubba dub dub',
        'form-1-id': '',
        'form-1-string': 'new string',
        'form-1-comment': 'adding this entity now',
    }
    response = client_superuser.post(url, form_data)
    assert response.status_code == 200
    assert 'changed 0' in response.content
    assert 'new string' in response.content
    assert 'adding this entity now' in response.content

    total = Entity.objects.filter(
        resource=resource,
        obsolete=False,
    ).count()
    assert total == nb_entities

    # Verify the new string has the correct order.
    new_string = Entity.objects.filter(
        resource=resource,
        obsolete=False,
        string='new string',
    ).first()
    # The highest order before adding new string was 0,
    # so the order of that new one should be 1.
    assert new_string.order == 1


@pytest.mark.django_db
def test_manage_project_strings_download_csv(client_superuser):
    locale_kl = LocaleFactory.create(code='kl', name='Klingon')
    locale_gs = LocaleFactory.create(code='gs', name='Geonosian')
    project = ProjectFactory.create(
        data_source='database',
        locales=[locale_kl, locale_gs],
        repositories=[]
    )
    url = reverse('pontoon.admin.project.strings', args=(project.slug,))

    new_strings = """
        And on the pedestal these words appear:
        'My name is Ozymandias, king of kings:
        Look on my works, ye Mighty, and despair!'
    """
    response = client_superuser.post(url, {'new_strings': new_strings})
    assert response.status_code == 200

    # Test downloading the data.
    response = client_superuser.get(url, {'format': 'csv'})
    assert response.status_code == 200
    assert response._headers['content-type'] == ('Content-Type', 'text/csv')

    # Verify the original content is here.
    assert 'pedestal' in response.content
    assert 'Ozymandias' in response.content
    assert 'Mighty' in response.content

    # Verify we have the locale columns.
    assert 'kl' in response.content
    assert 'gs' in response.content

    # Now add some translations.
    entity = Entity.objects.filter(string='And on the pedestal these words appear:')[0]
    TranslationFactory.create(
        string='Et sur le piédestal il y a ces mots :',
        entity=entity,
        locale=locale_kl,
        approved=True,
    )
    TranslationFactory.create(
        string='Und auf dem Sockel steht die Schrift: ‚Mein Name',
        entity=entity,
        locale=locale_gs,
        approved=True,
    )
    entity = Entity.objects.filter(string='\'My name is Ozymandias, king of kings:')[0]
    TranslationFactory.create(
        string='"Mon nom est Ozymandias, Roi des Rois.',
        entity=entity,
        locale=locale_kl,
        approved=True,
    )
    TranslationFactory.create(
        string='Ist Osymandias, aller Kön’ge König: –',
        entity=entity,
        locale=locale_gs,
        approved=True,
    )
    entity = Entity.objects.filter(string='Look on my works, ye Mighty, and despair!\'')[0]
    TranslationFactory.create(
        string='Voyez mon œuvre, vous puissants, et désespérez !"',
        entity=entity,
        locale=locale_kl,
        approved=True,
    )
    TranslationFactory.create(
        string='Seht meine Werke, Mächt’ge, und erbebt!‘',
        entity=entity,
        locale=locale_gs,
        approved=True,
    )

    response = client_superuser.get(url, {'format': 'csv'})

    # Verify the translated content is here.
    assert 'pedestal' in response.content
    assert 'piédestal' in response.content
    assert 'Sockel' in response.content
    assert 'Mighty' in response.content
    assert 'puissants' in response.content
    assert 'Mächt’ge' in response.content


@pytest.mark.django_db
def test_project_add_locale(client_superuser):
    locale_kl = LocaleFactory.create(code='kl', name='Klingon')
    locale_gs = LocaleFactory.create(code='gs', name='Geonosian')
    project = ProjectFactory.create(
        data_source='database',
        locales=[locale_kl],
        repositories=[],
    )
    _create_or_update_translated_resources(project, [locale_kl])

    url = reverse('pontoon.admin.project', args=(project.slug,))

    # Boring data creation for FormSets. Django is painful with that,
    # or I don't know how to handle that more gracefully.
    form = ProjectForm(instance=project)
    form_data = dict(form.initial)
    del form_data['width']
    del form_data['deadline']
    del form_data['contact']
    form_data.update({
        'subpage_set-INITIAL_FORMS': '0',
        'subpage_set-TOTAL_FORMS': '1',
        'subpage_set-MIN_NUM_FORMS': '0',
        'subpage_set-MAX_NUM_FORMS': '1000',
        'externalresource_set-TOTAL_FORMS': '1',
        'externalresource_set-MAX_NUM_FORMS': '1000',
        'externalresource_set-MIN_NUM_FORMS': '0',
        'externalresource_set-INITIAL_FORMS': '0',
        'tag_set-TOTAL_FORMS': '1',
        'tag_set-INITIAL_FORMS': '0',
        'tag_set-MAX_NUM_FORMS': '1000',
        'tag_set-MIN_NUM_FORMS': '0',
        'repositories-INITIAL_FORMS': '0',
        'repositories-MIN_NUM_FORMS': '0',
        'repositories-MAX_NUM_FORMS': '1000',
        'repositories-TOTAL_FORMS': '0',
        # These are the values that actually matter.
        'pk': project.pk,
        'locales': [locale_kl.id, locale_gs.id],
    })
    response = client_superuser.post(url, form_data)
    assert response.status_code == 200
    assert '. Error.' not in response.content

    # Verify we have the right ProjectLocale objects.
    pl = ProjectLocale.objects.filter(project=project)
    assert len(pl) == 2

    # Verify that TranslatedResource objects have been created.
    resource = Resource.objects.get(project=project, path='database')
    tr = TranslatedResource.objects.filter(resource=resource)
    assert len(tr) == 2
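
# A small helper one could factor out for the FormSet management-form
# boilerplate repeated in the tests above (an illustrative sketch, not
# part of the original test module):
def management_form_data(prefix, total, initial, min_num=0, max_num=1000):
    # Build the four management-form keys Django formsets expect.
    return {
        '%s-TOTAL_FORMS' % prefix: total,
        '%s-INITIAL_FORMS' % prefix: initial,
        '%s-MIN_NUM_FORMS' % prefix: min_num,
        '%s-MAX_NUM_FORMS' % prefix: max_num,
    }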
33.029851
91
0.675629
# -*- coding: utf-8 -*- import pytest from django.core.urlresolvers import reverse from pontoon.administration.forms import ( ProjectForm, ) from pontoon.administration.views import _create_or_update_translated_resources from pontoon.base.models import ( Entity, Locale, Project, ProjectLocale, Resource, TranslatedResource, ) from pontoon.test.factories import ( EntityFactory, LocaleFactory, ProjectFactory, ResourceFactory, TranslationFactory, UserFactory, ) @pytest.mark.django_db def test_manage_project_strings(client): project = ProjectFactory.create(data_source='database', repositories=[]) url = reverse('pontoon.admin.project.strings', args=(project.slug,)) # Test with anonymous user. response = client.get(url) assert response.status_code == 403 # Test with a user that is not a superuser. user = UserFactory.create() client.force_login(user) response = client.get(url) assert response.status_code == 403 # Test with a superuser. user.is_superuser = True user.save() response = client.get(url) assert response.status_code == 200 @pytest.mark.django_db def test_manage_project(client_superuser): url = reverse('pontoon.admin.project.new') response = client_superuser.get(url) assert response.status_code == 200 @pytest.mark.django_db def test_manage_project_strings_bad_request(client_superuser): # Tets an unknown project returns a 404 error. url = reverse('pontoon.admin.project.strings', args=('unknown',)) response = client_superuser.get(url) assert response.status_code == 404 @pytest.mark.django_db def test_manage_project_strings_new(client_superuser, locale_a): project = ProjectFactory.create( data_source='database', repositories=[], locales=[locale_a], ) url = reverse('pontoon.admin.project.strings', args=(project.slug,)) # Test sending a well-formatted batch of strings. new_strings = """Hey, I just met you And this is crazy But here's my number So call me maybe? """ response = client_superuser.post(url, {'new_strings': new_strings}) assert response.status_code == 200 # Verify a resource has been created. resources = list(Resource.objects.filter(project=project)) assert len(resources) == 1 assert resources[0].path == 'database' # Verify all strings have been created as entities. entities = Entity.for_project_locale(project, locale_a) assert len(entities) == 4 expected_strings = [ 'Hey, I just met you', 'And this is crazy', 'But here\'s my number', 'So call me maybe?', ] assert expected_strings == [x.string for x in entities] # Verify strings have the correct order. for index, entity in enumerate(entities): assert entity.order == index # Verify new strings appear on the page. assert 'Hey, I just met you' in response.content @pytest.mark.django_db def test_manage_project_strings_translated_resource(client_superuser): """Test that adding new strings to a project enables translation of that project on all enabled locales. """ locales = [ LocaleFactory.create(code='kl', name='Klingon'), LocaleFactory.create(code='gs', name='Geonosian'), ] project = ProjectFactory.create( data_source='database', locales=locales, repositories=[] ) locales_count = len(locales) _create_or_update_translated_resources(project, locales) url = reverse('pontoon.admin.project.strings', args=(project.slug,)) new_strings = """ Morty, do you know what "Wubba lubba dub dub" means? Oh that's just Rick's stupid non-sense catch phrase. It's not. In my people's tongue, it means "I am in great pain, please help me". 
""" strings_count = 4 response = client_superuser.post(url, {'new_strings': new_strings}) assert response.status_code == 200 # Verify no strings have been created as entities. entities = list(Entity.objects.filter(resource__project=project)) assert len(entities) == strings_count # Verify the resource has the right stats. resources = Resource.objects.filter(project=project) assert len(resources) == 1 resource = resources[0] assert resource.total_strings == strings_count # Verify the correct TranslatedResource objects have been created. translated_resources = TranslatedResource.objects.filter(resource__project=project) assert len(translated_resources) == locales_count # Verify stats have been correctly updated on locale, project and resource. for tr in translated_resources: assert tr.total_strings == strings_count project = Project.objects.get(id=project.id) assert project.total_strings == strings_count * locales_count for l in locales: locale = Locale.objects.get(id=l.id) assert locale.total_strings == strings_count @pytest.mark.django_db def test_manage_project_strings_new_all_empty(client_superuser): """Test that sending empty data doesn't create empty strings in the database. """ project = ProjectFactory.create(data_source='database', repositories=[]) url = reverse('pontoon.admin.project.strings', args=(project.slug,)) # Test sending an empty batch of strings. new_strings = " \n \n\n" response = client_superuser.post(url, {'new_strings': new_strings}) assert response.status_code == 200 # Verify no strings have been created as entities. entities = list(Entity.objects.filter(resource__project=project)) assert len(entities) == 0 @pytest.mark.django_db def test_manage_project_strings_list(client_superuser): project = ProjectFactory.create(data_source='database', repositories=[]) resource = ResourceFactory.create(project=project) nb_entities = 2 entities = EntityFactory.create_batch(nb_entities, resource=resource) url = reverse('pontoon.admin.project.strings', args=(project.slug,)) response = client_superuser.get(url) assert response.status_code == 200 for i in range(nb_entities): assert 'string %s' % i in response.content # Test editing strings and comments. form_data = { 'form-TOTAL_FORMS': nb_entities, 'form-INITIAL_FORMS': nb_entities, 'form-MIN_NUM_FORMS': 0, 'form-MAX_NUM_FORMS': 1000, 'form-0-id': entities[0].id, 'form-0-string': 'changed 0', 'form-0-comment': 'Wubba lubba dub dub', 'form-1-id': entities[1].id, 'form-1-string': 'string 1', 'form-1-obsolete': 'on', # Remove this one. } response = client_superuser.post(url, form_data) assert response.status_code == 200 assert 'changed 0' in response.content assert 'Wubba lubba dub dub' in response.content assert 'string 0' not in response.content assert 'string 1' not in response.content # It's been removed. total = Entity.objects.filter( resource=resource, obsolete=False, ).count() assert total == nb_entities - 1 # Test adding a new string. 
form_data = { 'form-TOTAL_FORMS': nb_entities, 'form-INITIAL_FORMS': nb_entities - 1, 'form-MIN_NUM_FORMS': 0, 'form-MAX_NUM_FORMS': 1000, 'form-0-id': entities[0].id, 'form-0-string': 'changed 0', 'form-0-comment': 'Wubba lubba dub dub', 'form-1-id': '', 'form-1-string': 'new string', 'form-1-comment': 'adding this entity now', } response = client_superuser.post(url, form_data) assert response.status_code == 200 assert 'changed 0' in response.content assert 'new string' in response.content assert 'adding this entity now' in response.content total = Entity.objects.filter( resource=resource, obsolete=False, ).count() assert total == nb_entities # Verify the new string has the correct order. new_string = Entity.objects.filter( resource=resource, obsolete=False, string='new string', ).first() # The highest order before adding new string was 0, # so the order of that new one should be 1. assert new_string.order == 1 @pytest.mark.django_db def test_manage_project_strings_download_csv(client_superuser): locale_kl = LocaleFactory.create(code='kl', name='Klingon') locale_gs = LocaleFactory.create(code='gs', name='Geonosian') project = ProjectFactory.create( data_source='database', locales=[locale_kl, locale_gs], repositories=[] ) url = reverse('pontoon.admin.project.strings', args=(project.slug,)) new_strings = """ And on the pedestal these words appear: 'My name is Ozymandias, king of kings: Look on my works, ye Mighty, and despair!' """ response = client_superuser.post(url, {'new_strings': new_strings}) assert response.status_code == 200 # Test downloading the data. response = client_superuser.get(url, {'format': 'csv'}) assert response.status_code == 200 assert response._headers['content-type'] == ('Content-Type', 'text/csv') # Verify the original content is here. assert 'pedestal' in response.content assert 'Ozymandias' in response.content assert 'Mighty' in response.content # Verify we have the locale columns. assert 'kl' in response.content assert 'gs' in response.content # Now add some translations. entity = Entity.objects.filter(string='And on the pedestal these words appear:')[0] TranslationFactory.create( string='Et sur le piédestal il y a ces mots :', entity=entity, locale=locale_kl, approved=True, ) TranslationFactory.create( string='Und auf dem Sockel steht die Schrift: ‚Mein Name', entity=entity, locale=locale_gs, approved=True, ) entity = Entity.objects.filter(string='\'My name is Ozymandias, king of kings:')[0] TranslationFactory.create( string='"Mon nom est Ozymandias, Roi des Rois.', entity=entity, locale=locale_kl, approved=True, ) TranslationFactory.create( string='Ist Osymandias, aller Kön’ge König: –', entity=entity, locale=locale_gs, approved=True, ) entity = Entity.objects.filter(string='Look on my works, ye Mighty, and despair!\'')[0] TranslationFactory.create( string='Voyez mon œuvre, vous puissants, et désespérez !"', entity=entity, locale=locale_kl, approved=True, ) TranslationFactory.create( string='Seht meine Werke, Mächt’ge, und erbebt!‘', entity=entity, locale=locale_gs, approved=True, ) response = client_superuser.get(url, {'format': 'csv'}) # Verify the translated content is here. 
assert 'pedestal' in response.content assert 'piédestal' in response.content assert 'Sockel' in response.content assert 'Mighty' in response.content assert 'puissants' in response.content assert 'Mächt’ge' in response.content @pytest.mark.django_db def test_project_add_locale(client_superuser): locale_kl = LocaleFactory.create(code='kl', name='Klingon') locale_gs = LocaleFactory.create(code='gs', name='Geonosian') project = ProjectFactory.create( data_source='database', locales=[locale_kl], repositories=[], ) _create_or_update_translated_resources(project, [locale_kl]) url = reverse('pontoon.admin.project', args=(project.slug,)) # Boring data creation for FormSets. Django is painful with that, # or I don't know how to handle that more gracefully. form = ProjectForm(instance=project) form_data = dict(form.initial) del form_data['width'] del form_data['deadline'] del form_data['contact'] form_data.update({ 'subpage_set-INITIAL_FORMS': '0', 'subpage_set-TOTAL_FORMS': '1', 'subpage_set-MIN_NUM_FORMS': '0', 'subpage_set-MAX_NUM_FORMS': '1000', 'externalresource_set-TOTAL_FORMS': '1', 'externalresource_set-MAX_NUM_FORMS': '1000', 'externalresource_set-MIN_NUM_FORMS': '0', 'externalresource_set-INITIAL_FORMS': '0', 'tag_set-TOTAL_FORMS': '1', 'tag_set-INITIAL_FORMS': '0', 'tag_set-MAX_NUM_FORMS': '1000', 'tag_set-MIN_NUM_FORMS': '0', 'repositories-INITIAL_FORMS': '0', 'repositories-MIN_NUM_FORMS': '0', 'repositories-MAX_NUM_FORMS': '1000', 'repositories-TOTAL_FORMS': '0', # These are the values that actually matter. 'pk': project.pk, 'locales': [locale_kl.id, locale_gs.id], }) response = client_superuser.post(url, form_data) assert response.status_code == 200 assert '. Error.' not in response.content # Verify we have the right ProjectLocale objects. pl = ProjectLocale.objects.filter(project=project) assert len(pl) == 2 # Verify that TranslatedResource objects have been created. resource = Resource.objects.get(project=project, path='database') tr = TranslatedResource.objects.filter(resource=resource) assert len(tr) == 2
36
0
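An editorial aside on the Pontoon tests in the record above: they drive the Django admin views by hand-building formset POST payloads, so the management-form keys (form-TOTAL_FORMS, form-INITIAL_FORMS, form-MIN_NUM_FORMS, form-MAX_NUM_FORMS) have to agree with the rows being submitted. A minimal sketch of that pattern; the build_formset_payload helper is hypothetical and not part of Pontoon:

def build_formset_payload(prefix, rows, initial_count=None):
    """Build a POST dict for a Django formset (hypothetical helper).

    rows is a list of dicts mapping field name -> value; the
    management-form keys are derived from the row count.
    """
    if initial_count is None:
        initial_count = len(rows)
    data = {
        '%s-TOTAL_FORMS' % prefix: len(rows),
        '%s-INITIAL_FORMS' % prefix: initial_count,
        '%s-MIN_NUM_FORMS' % prefix: 0,
        '%s-MAX_NUM_FORMS' % prefix: 1000,
    }
    for i, row in enumerate(rows):
        for field, value in row.items():
            data['%s-%d-%s' % (prefix, i, field)] = value
    return data

# Mirrors the payload built in test_manage_project_strings_list (values illustrative):
# form_data = build_formset_payload('form', [
#     {'id': 1, 'string': 'changed 0', 'comment': 'Wubba lubba dub dub'},
#     {'id': 2, 'string': 'string 1', 'obsolete': 'on'},
# ])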
e0cfbaa527144500c045f954a24270f62d26262d
1,870
py
Python
orix/quaternion/__init__.py
bm424/texpy
8d78b568209a6da36fc831c6bc9e2b0cb4c740c8
[ "MIT" ]
6
2018-02-05T18:37:10.000Z
2018-10-07T22:07:26.000Z
orix/quaternion/__init__.py
bm424/texpy
8d78b568209a6da36fc831c6bc9e2b0cb4c740c8
[ "MIT" ]
5
2018-11-04T18:06:28.000Z
2019-09-13T11:22:43.000Z
orix/quaternion/__init__.py
bm424/texpy
8d78b568209a6da36fc831c6bc9e2b0cb4c740c8
[ "MIT" ]
3
2019-04-27T09:24:28.000Z
2019-09-13T10:24:57.000Z
# -*- coding: utf-8 -*- # Copyright 2018-2022 the orix developers # # This file is part of orix. # # orix is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # orix is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with orix. If not, see <http://www.gnu.org/licenses/>. """Four-dimensional objects. In a simplified sense, quaternions are an extension of the concept of complex numbers, represented by :math:`a + bi + cj + dk` where :math:`i`, :math:`j`, and :math:`k` are quaternion units and :math:`i^2 = j^2 = k^2 = ijk = -1`. For further reference see `the Wikipedia article <https://en.wikipedia.org/wiki/Quaternion>`_. Unit quaternions are efficient objects for representing rotations, and hence orientations. """ from orix.quaternion.quaternion import Quaternion, check_quaternion # isort: skip from orix.quaternion.orientation import Misorientation, Orientation from orix.quaternion.orientation_region import OrientationRegion, get_proper_groups from orix.quaternion.rotation import Rotation, von_mises from orix.quaternion.symmetry import Symmetry, get_distinguished_points, get_point_group # Lists what will be imported when calling "from orix.quaternion import *" __all__ = [ "check_quaternion", "Quaternion", "Rotation", "von_mises", "Misorientation", "Orientation", "get_proper_groups", "OrientationRegion", "get_distinguished_points", "get_point_group", "Symmetry", ]
36.666667
88
0.750802
# -*- coding: utf-8 -*- # Copyright 2018-2022 the orix developers # # This file is part of orix. # # orix is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # orix is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with orix. If not, see <http://www.gnu.org/licenses/>. """Four-dimensional objects. In a simplified sense, quaternions are an extension of the concept of complex numbers, represented by :math:`a + bi + cj + dk` where :math:`i`, :math:`j`, and :math:`k` are quaternion units and :math:`i^2 = j^2 = k^2 = ijk = -1`. For further reference see `the Wikipedia article <https://en.wikipedia.org/wiki/Quaternion>`_. Unit quaternions are efficient objects for representing rotations, and hence orientations. """ from orix.quaternion.quaternion import Quaternion, check_quaternion # isort: skip from orix.quaternion.orientation import Misorientation, Orientation from orix.quaternion.orientation_region import OrientationRegion, get_proper_groups from orix.quaternion.rotation import Rotation, von_mises from orix.quaternion.symmetry import Symmetry, get_distinguished_points, get_point_group # Lists what will be imported when calling "from orix.quaternion import *" __all__ = [ "check_quaternion", "Quaternion", "Rotation", "von_mises", "Misorientation", "Orientation", "get_proper_groups", "OrientationRegion", "get_distinguished_points", "get_point_group", "Symmetry", ]
0
0
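The module docstring above fixes the quaternion units through i^2 = j^2 = k^2 = ijk = -1, which determines the Hamilton product completely. A self-contained sketch (independent of the orix API) that multiplies quaternions stored as plain (a, b, c, d) tuples and checks those identities:

def qmul(p, q):
    # Hamilton product of quaternions p = (a, b, c, d) and q = (a', b', c', d')
    a1, b1, c1, d1 = p
    a2, b2, c2, d2 = q
    return (
        a1 * a2 - b1 * b2 - c1 * c2 - d1 * d2,
        a1 * b2 + b1 * a2 + c1 * d2 - d1 * c2,
        a1 * c2 - b1 * d2 + c1 * a2 + d1 * b2,
        a1 * d2 + b1 * c2 - c1 * b2 + d1 * a2,
    )

i, j, k = (0, 1, 0, 0), (0, 0, 1, 0), (0, 0, 0, 1)
minus_one = (-1, 0, 0, 0)
assert qmul(i, i) == qmul(j, j) == qmul(k, k) == minus_one
assert qmul(qmul(i, j), k) == minus_one  # ijk = -1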
a2c196cbaf7e46bd084dd1e8aa85dc85a0321db6
3,103
py
Python
dz5/integrators/integrators.py
vedrankolka/APR
4f8afefc74f3d0f67f5d2ec665c93a4b38fbdf2f
[ "Apache-2.0" ]
null
null
null
dz5/integrators/integrators.py
vedrankolka/APR
4f8afefc74f3d0f67f5d2ec665c93a4b38fbdf2f
[ "Apache-2.0" ]
null
null
null
dz5/integrators/integrators.py
vedrankolka/APR
4f8afefc74f3d0f67f5d2ec665c93a4b38fbdf2f
[ "Apache-2.0" ]
null
null
null
import numpy as np class Integrator: def __init__(self, A, B=0, step=0.01, r=lambda t: 1): self.A = A self.B = B self.step = step self.r = r def integrate(self, x0, start, end, callbacks=[], v=100): xs = [x0] ts = [start] x = x0 t = start + self.step i = 0 while t <= end: x = self.next(x, t) xs.append(x) ts.append(t) t += self.step if v is not None and i % v == 0: for callback in callbacks: callback(x) i += 1 return np.array(xs), np.array(ts) def next(self, x, t): pass class ImplicitIntegrator(Integrator): def next_implicit(self, x, t, x_next_predicted): pass class EulerIntegrator(Integrator): def next(self, x, t): delta_x = (self.A @ x + self.B * self.r(t)) * self.step return x + delta_x class ReversedEulerIntegrator(ImplicitIntegrator): def __init__(self, A, B=0, step=0.01, r=lambda t: 1): super(ReversedEulerIntegrator, self).__init__(A, B, step, r) n = A.shape[0] I = np.identity(n) P = np.linalg.inv((I - step * A)) self.P = P self.Q = P @ (step * B) def next(self, x, t): return self.P @ x + self.Q * self.r(t + self.step) def next_implicit(self, x, t, x_next_predicted): delta_x = self.step * (self.A @ x_next_predicted + self.B * self.r(t + self.step)) return x + delta_x class TrapezeIntegrator(ImplicitIntegrator): def __init__(self, A, B=0, step=0.01, r=lambda t: 1): super(TrapezeIntegrator, self).__init__(A, B, step, r) n = A.shape[0] I = np.identity(n) P = np.linalg.inv(I - A * step/2) self.R = P @ (I + A * step/2) self.S = P @ (step/2 * B) def next(self, x, t): return self.R @ x + self.S * (self.r(t) + self.r(t + self.step)) def next_implicit(self, x, t, x_next_predicted): delta_x = self.step/2 * (self.A @ x + self.B * self.r(t) + self.A @ x_next_predicted + self.B * self.r(t + self.step)) return x + delta_x class RungeKutta4Real(Integrator): def __init__(self, A, B=0, step=0.01, r=lambda t: 1): super(RungeKutta4Real, self).__init__(A, B, step, r) def __f(self, x, t): return self.A @ x + self.B * self.r(t) def next(self, x, t): T = self.step m1 = self.__f(x, t) m2 = self.__f(x + T/2 * m1, t + T/2) m3 = self.__f(x + T/2 * m2, t + T/2) m4 = self.__f(x + T*m3, t + T) return x + T/6 * (m1 + 2*m2 + 2*m3 + m4) class PECEIntegrator(Integrator): def __init__(self, predictor: Integrator, corrector: ImplicitIntegrator, s: int): self.predictor = predictor self.corrector = corrector self.s = s self.step = predictor.step def next(self, x, t): x_next = self.predictor.next(x, t) for i in range(self.s): x_next = self.corrector.next_implicit(x, t, x_next) return x_next
26.982609
126
0.535933
import numpy as np class Integrator: def __init__(self, A, B=0, step=0.01, r=lambda t: 1): self.A = A self.B = B self.step = step self.r = r def integrate(self, x0, start, end, callbacks=[], v=100): xs = [x0] ts = [start] x = x0 t = start + self.step i = 0 while t <= end: x = self.next(x, t) xs.append(x) ts.append(t) t += self.step if v is not None and i % v == 0: for callback in callbacks: callback(x) i += 1 return np.array(xs), np.array(ts) def next(self, x, t): pass class ImplicitIntegrator(Integrator): def next_implicit(self, x, t, x_next_predicted): pass class EulerIntegrator(Integrator): def next(self, x, t): delta_x = (self.A @ x + self.B * self.r(t)) * self.step return x + delta_x class ReversedEulerIntegrator(ImplicitIntegrator): def __init__(self, A, B=0, step=0.01, r=lambda t: 1): super(ReversedEulerIntegrator, self).__init__(A, B, step, r) n = A.shape[0] I = np.identity(n) P = np.linalg.inv((I - step * A)) self.P = P self.Q = P @ (step * B) def next(self, x, t): return self.P @ x + self.Q * self.r(t + self.step) def next_implicit(self, x, t, x_next_predicted): delta_x = self.step * (self.A @ x_next_predicted + self.B * self.r(t + self.step)) return x + delta_x class TrapezeIntegrator(ImplicitIntegrator): def __init__(self, A, B=0, step=0.01, r=lambda t: 1): super(TrapezeIntegrator, self).__init__(A, B, step, r) n = A.shape[0] I = np.identity(n) P = np.linalg.inv(I - A * step/2) self.R = P @ (I + A * step/2) self.S = P @ (step/2 * B) def next(self, x, t): return self.R @ x + self.S * (self.r(t) + self.r(t + self.step)) def next_implicit(self, x, t, x_next_predicted): delta_x = self.step/2 * (self.A @ x + self.B * self.r(t) + self.A @ x_next_predicted + self.B * self.r(t + self.step)) return x + delta_x class RungeKutta4Real(Integrator): def __init__(self, A, B=0, step=0.01, r=lambda t: 1): super(RungeKutta4Real, self).__init__(A, B, step, r) def __f(self, x, t): return self.A @ x + self.B * self.r(t) def next(self, x, t): T = self.step m1 = self.__f(x, t) m2 = self.__f(x + T/2 * m1, t + T/2) m3 = self.__f(x + T/2 * m2, t + T/2) m4 = self.__f(x + T*m3, t + T) return x + T/6 * (m1 + 2*m2 + 2*m3 + m4) class PECEIntegrator(Integrator): def __init__(self, predictor: Integrator, corrector: ImplicitIntegrator, s: int): self.predictor = predictor self.corrector = corrector self.s = s self.step = predictor.step def next(self, x, t): x_next = self.predictor.next(x, t) for i in range(self.s): x_next = self.corrector.next_implicit(x, t, x_next) return x_next
0
0
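All the integrator classes in the record above share one calling convention: construct with the system matrix A (plus optional B, step and forcing function r), then call integrate(x0, start, end). A usage sketch, assuming the classes above are importable; note that B is passed as a zero vector rather than the scalar default, so the matrix products inside the implicit integrators stay well-shaped:

import numpy as np

A = np.array([[0.0, 1.0], [-1.0, 0.0]])  # undamped harmonic oscillator x' = Ax
B = np.zeros(2)                          # no forcing term
x0 = np.array([1.0, 0.0])

euler = EulerIntegrator(A, B, step=0.01)
pece = PECEIntegrator(predictor=EulerIntegrator(A, B, step=0.01),
                      corrector=TrapezeIntegrator(A, B, step=0.01),
                      s=2)

xs_euler, ts = euler.integrate(x0, 0.0, 10.0, v=None)
xs_pece, _ = pece.integrate(x0, 0.0, 10.0, v=None)

# The "energy" x1^2 + x2^2 starts at 1 and should stay there; explicit Euler
# lets it drift upward, while the predictor-corrector run tracks it more closely.
print(np.sum(xs_euler[-1] ** 2), np.sum(xs_pece[-1] ** 2))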
d5ff97767b950838b9579739fb6e42b24968f25c
24,761
py
Python
src/auctioneer.py
Brechard/Auctioneer
f530a0e00d333a9f4d2201e61094b9b4986117a5
[ "MIT" ]
2
2019-02-10T12:07:00.000Z
2020-03-15T13:29:47.000Z
src/auctioneer.py
Brechard/Auctioneer
f530a0e00d333a9f4d2201e61094b9b4986117a5
[ "MIT" ]
null
null
null
src/auctioneer.py
Brechard/Auctioneer
f530a0e00d333a9f4d2201e61094b9b4986117a5
[ "MIT" ]
null
null
null
import random

import matplotlib.pyplot as plt
import numpy as np
from prettytable import PrettyTable

from auction import Auction


class Auctioneer:

    def __init__(self, penalty_factor=0.1, bidding_factor_strategy=[], use_seller=True,
                 starting_prices=[], M_types=3, K_sellers=4, N_buyers=10, R_rounds=3,
                 level_comm_flag=False, debug=True, universal_maximum_price=100):
        """
        :param penalty_factor: Multiplier for fee calculation
        :param bidding_factor_strategy: Array with the bidding factor strategy of each buyer
        :param use_seller: Flag to use seller or item as second dimension for alpha
        :param starting_prices: Debug purposes, starting prices can be forced this way.
        :param M_types: Number of types of items
        :param K_sellers: Number of sellers
        :param N_buyers: Number of buyers
        :param R_rounds: Number of rounds
        :param level_comm_flag: Flag to say if level commitment is allowed or not
        :param debug: Flag for debug prints
        :param universal_maximum_price: Max initial starting price
        """
        self.debug = debug
        if len(bidding_factor_strategy) == 0:
            # If the strategy is not passed, every buyer defaults to strategy 2
            # bidding_factor_strategy = [np.random.randint(2, 4, 1) for n in range(N_buyers)]
            bidding_factor_strategy = [2 for n in range(N_buyers)]
        else:
            for s in bidding_factor_strategy:
                if s not in [1, 2, 3, 4]:
                    print("Error in the strategy input")
                    return

        self.m_item_types = range(M_types)
        self.k_sellers = K_sellers
        self.n_buyers = N_buyers
        self.r_rounds = R_rounds

        self.max_starting_price = universal_maximum_price
        self.penalty_factor = penalty_factor

        # If level commitment is activated sellers cannot cancel a won auction
        self.level_commitment_activated = level_comm_flag
        self.buyers_already_won = self.initialize_buyers_flag()
        self.auctions_history = []

        # Assign a type of item to each seller randomly
        self.sellers_types = [random.sample(self.m_item_types, 1)[0] for seller in range(self.k_sellers)]

        # Assign second dimension of alpha following the input flag
        if use_seller:
            self.second_dimension = self.k_sellers
        else:
            self.second_dimension = M_types

        self.bidding_factor_strategy = bidding_factor_strategy
        self.bidding_factor = self.calculate_bidding_factor()

        self.increase_bidding_factor = np.random.uniform(1, 1.5, size=self.n_buyers)
        self.decrease_bidding_factor = np.random.uniform(0.3, 0.8, size=self.n_buyers)

        # Ceiling threshold for strategy 2
        self.ceiling = 2

        self.market_price = np.zeros((self.r_rounds, self.k_sellers))
        self.buyers_profits = np.zeros((self.r_rounds, self.n_buyers))
        self.cumulative_buyers_profits = np.zeros((self.n_buyers, self.r_rounds))
        self.cumulative_sellers_profits = np.zeros((self.k_sellers, self.r_rounds))
        self.sellers_profits = np.zeros((self.r_rounds, self.k_sellers))

        self.starting_prices = self.calculate_starting_prices(starting_prices)
        self.print_alphas()

        self.times_items_returned = 0
        self.times_bad_trade = 0

    def calculate_bid(self, buyer_id, item_type, seller_id, starting_price, auction_round):
        """
        Calculate the bid for a specific buyer considering his bidding strategy
        :param buyer_id: id of the buyer to calculate the bid from
        :param item_type: kind of item that is being auctioned
        :param seller_id: id of the seller that is auctioning
        :param starting_price: starting price of the item that is being auctioned
        :param auction_round: round of the auction
        :return: bid of the buyer
        """
        second_dimension = seller_id
        if self.second_dimension == len(self.m_item_types):
            second_dimension = item_type

        bid = self.bidding_factor[buyer_id][second_dimension] * starting_price

        if not self.level_commitment_activated \
                or not self.buyers_already_won[buyer_id]:
            # If the buyer flag is not ON it means the buyer hasn't won an auction in this round yet
            return bid
        auction, seller = self.get_auction_with_winner(buyer_id, auction_round)
        previous_profit, market_price = auction.winner_profit, auction.market_price
        penalty = self.calculate_fee(market_price - previous_profit)

        return max(bid, starting_price + previous_profit + penalty)

    def calculate_bidding_factor(self):
        """
        Bidding factor strategies:
            1 - When an auction is won, the bidding factor is multiplied by the increasing factor
                and when lost by the decreasing factor
            2 - Depends on the kind of item, but has a max value to avoid price explosion.
                If alpha bigger than 2, decrease it using decrease factor.
            3 - Depends on the kind of item, if the bid is higher than market price, bidding factor
                is multiplied by the decreasing factor while if it is lower multiply by the increasing factor.
        """
        bidding_factor = []
        for buyer in range(self.n_buyers):
            bidding_factor.append(
                np.random.uniform(1, 2, self.second_dimension)
            )
        return bidding_factor

    def calculate_starting_prices(self, starting_prices):
        """
        Calculate the starting prices of the sellers. If the input parameter is empty they will be
        generated randomly, otherwise they will be the same as the input parameter; this is only for
        debug purposes.
        :param starting_prices: DEBUG purposes. Set with the desired initial prices. If empty calculate them randomly.
        :return: the starting prices for the auctions
        """
        if len(starting_prices) > 0:
            return starting_prices

        prices = []
        for seller in range(self.k_sellers):
            prices.append(random.random() * self.max_starting_price)
        return prices

    def calculate_fee(self, price_paid):
        # Calculate the fee to pay for an item if it is cancelled
        return self.penalty_factor * price_paid

    def choose_item_to_keep(self, auction, market_price, price_to_pay, winner, seller, auction_round):
        """
        When a buyer wins a second item in a round, one of the items has to be returned. The agent is
        rational and therefore will always keep the item with the higher return, considering the fee
        to pay for the returned item.
        :param auction: auction object with the information of the auction that made the buyer win the new item
        :param market_price: market price of the item just won
        :param price_to_pay: price paid for the new item
        :param winner: id of the buyer
        :param seller: id of the seller
        :param auction_round: round of the auction
        """
        self.times_items_returned += 1
        previous_auction, previous_seller = self.get_auction_with_winner(winner, auction_round)
        previous_winner_profit = previous_auction.winner_profit
        previous_fee = self.calculate_fee(previous_auction.price_paid)
        new_profit = market_price - price_to_pay
        new_fee = self.calculate_fee(price_to_pay)
        if new_profit - previous_fee > previous_winner_profit - new_fee:
            # It is profitable to keep the new item, pay fee to previous seller
            previous_auction.return_item(previous_fee,
                                         kept_item_profit=new_profit,
                                         kept_item_fee=new_fee,
                                         seller_item_kept=seller,
                                         kept_item_price=price_to_pay)
            if new_profit - previous_fee < 0:
                self.times_bad_trade += 1
        else:
            auction.return_item(new_fee,
                                kept_item_profit=previous_winner_profit,
                                kept_item_fee=previous_fee,
                                seller_item_kept=previous_seller,
                                kept_item_price=previous_auction.price_paid)
            if previous_winner_profit - new_fee < 0:
                self.times_bad_trade += 1

    def choose_winner(self, bids, market_price):
        """
        Choose the winner of an auction.
        :param bids: map with the bids made by the buyers. Key is the id of the buyer and Value the bid
        :param market_price: market price of the item to sell
        :return: id of the buyer that wins the item, price to pay by the winner
        """
        valid_bids = []
        for bid in bids.values():

            if bid > market_price:
                continue

            valid_bids.append(bid)

        if len(valid_bids) == 0:
            valid_bids.append(next(iter(bids.values())))

        valid_bids = sorted(valid_bids, reverse=True)

        winner_id = [key for key in bids.keys() if bids[key] == valid_bids[0]][0]
        try:
            price_to_pay = valid_bids[1]
        except IndexError:
            price_to_pay = valid_bids[0]

        return winner_id, price_to_pay

    def get_alphas(self, seller, item):
        """
        Get the bidding factors
        :param seller: id of the seller
        :param item: kind of item
        :return: bidding factors
        """
        second_dimension = seller
        if self.second_dimension == len(self.m_item_types):
            second_dimension = item

        alphas = []
        for buyer in range(self.n_buyers):
            alphas.append(self.bidding_factor[buyer][second_dimension])
        return alphas

    def get_auction_with_winner(self, winner, auction_round):
        """
        Retrieve the auction object of a previous auction with the winner. Used when level commitment
        is activated and a buyer wins a second time.
        :param winner: id of the winner
        :param auction_round: round of the auction
        :return: auction object, seller id of the auction
        """
        seller = 0
        for auction in self.auctions_history[auction_round]:
            if winner == auction.winner:
                return auction, seller
            seller += 1
        assert 0 == 1

    def initialize_auction_parameters(self, seller):
        # Initialize all the parameters needed for an auction
        starting_price = self.starting_prices[seller]
        n_buyer_auction = 0
        total_bid = 0
        buyers_bid = {}
        item = self.sellers_types[seller]
        return buyers_bid, item, n_buyer_auction, starting_price, total_bid

    def initialize_buyers_flag(self):
        # Initialize the list with the flags that indicate whether a buyer has already won an auction in the round
        return [False for buyer in range(self.n_buyers)]

    def print_alphas(self, extra_debug=False):
        """
        Print the values of the bidding factors.
        :param extra_debug: Even if in the parent object debug is set to false, it is possible that
        this printing is required. With this input parameter this is possible.
        """
        if not self.debug and not extra_debug:
            return

        buyer = 0
        alphas_table = PrettyTable()

        if self.second_dimension == self.k_sellers:
            alphas_table.field_names = ["S-0"] + ["S" + str(seller) for seller in range(self.k_sellers)]
        elif self.second_dimension == len(self.m_item_types):
            alphas_table.field_names = ["S-1"] + ["Type " + str(item_type) for item_type in self.m_item_types]

        for strategy in self.bidding_factor_strategy:
            alphas_table.add_row(["B" + str(buyer)] + ['%.2f' % elem for elem in self.bidding_factor[buyer]])
            buyer += 1

        print(alphas_table)

    def print_factors(self, extra_debug=False):
        """
        Print the increasing and decreasing factors for every buyer.
        :param extra_debug: Even if in the parent object debug is set to false, it is possible that
        this printing is required. With this input parameter this is possible.
""" if not self.debug and not extra_debug: return initial_table = PrettyTable() initial_table.field_names = [""] + ["B" + str(buyer) for buyer in range(self.n_buyers)] initial_table.add_row(["Increasing factor"] + ['%.2f' % elem for elem in self.increase_bidding_factor]) initial_table.add_row(["Decreasing factor"] + ['%.2f' % elem for elem in self.decrease_bidding_factor]) print(initial_table) def print_round(self, round_number, extra_debug=False): """ Print the information of all the auctions in a round :param round_number: round of auction :param extra_debug: Even if in the parent object debug is set to false, it is possible that this printing is required. With this input parameter this is possible. \ """ if not self.debug and not extra_debug: return print() print("Round", round_number, "history") seller = 0 for auction in self.auctions_history[round_number]: auction.print_auction(seller) seller += 1 print() print("------------------------------------------------------") def update_alphas(self, winner, seller, item, bids): """ Update the bidding factor depending on the strategies of each buyer :param winner: id of the winner of the auction :param seller: seller of the item of the auction :param item: kind of items that the seller auctions :param bids: dictionary with the bids of the buyers, key is the id of the buyer and the value is the bid :return: new alphas after updating """ second_dimension = seller if self.second_dimension == len(self.m_item_types): second_dimension = item new_alphas = [] for buyer in range(self.n_buyers): if self.bidding_factor_strategy[buyer] == 1: if buyer == winner: self.bidding_factor[buyer][second_dimension] *= self.decrease_bidding_factor[buyer] elif self.buyers_already_won[buyer] and not self.level_commitment_activated: self.bidding_factor[buyer][second_dimension] = self.bidding_factor[buyer][second_dimension] else: self.bidding_factor[buyer][second_dimension] *= self.increase_bidding_factor[buyer] new_alphas.append(self.bidding_factor[buyer][second_dimension]) # Strategy 2 - Depends on the kind of item, but has a max value to avoid price explosion. # If alpha bigger than ceiling, decrease it using decrease factor. 
            elif self.bidding_factor_strategy[buyer] == 2:
                # if buyer == winner:  # Do not update
                if buyer != winner and self.bidding_factor[buyer][second_dimension] < self.ceiling:
                    self.bidding_factor[buyer][second_dimension] *= self.increase_bidding_factor[buyer]
                elif self.buyers_already_won[buyer] and not self.level_commitment_activated:
                    continue
                elif buyer != winner and self.bidding_factor[buyer][second_dimension] > self.ceiling:
                    self.bidding_factor[buyer][second_dimension] *= self.decrease_bidding_factor[buyer]
                new_alphas.append(self.bidding_factor[buyer][second_dimension])

            # Strategy 3 - Depends on the kind of item, but checks the market price
            # to see if previous alpha update was helpful or not
            elif self.bidding_factor_strategy[buyer] == 3:
                if buyer == winner:
                    self.bidding_factor[buyer][second_dimension] *= self.decrease_bidding_factor[buyer]
                elif self.buyers_already_won[buyer] and not self.level_commitment_activated:
                    self.bidding_factor[buyer][second_dimension] = self.bidding_factor[buyer][second_dimension]
                else:
                    if bids[buyer] > np.mean(list(bids.values())):
                        self.bidding_factor[buyer][second_dimension] *= self.decrease_bidding_factor[buyer]
                    else:
                        self.bidding_factor[buyer][second_dimension] *= self.increase_bidding_factor[buyer]
                new_alphas.append(self.bidding_factor[buyer][second_dimension])

            # Strategy 4 - Fully random each time
            elif self.bidding_factor_strategy[buyer] == 4:
                self.bidding_factor[buyer][second_dimension] = np.random.uniform(1, 2)
                new_alphas.append(self.bidding_factor[buyer][second_dimension])

            # If the bidding factor is less than 1, replace it with the increasing factor
            if self.bidding_factor[buyer][second_dimension] < 1:
                self.bidding_factor[buyer][second_dimension] = self.increase_bidding_factor[buyer]

        return new_alphas

    def update_profits(self, auction_round):
        """
        Update the profit of every buyer and seller after a round is finished
        :param auction_round: number of round
        """
        seller = 0
        for auction in self.auctions_history[auction_round]:
            self.buyers_profits[auction_round, auction.winner] += auction.winner_profit
            self.sellers_profits[auction_round, seller] += auction.seller_profit
            seller += 1

        for buyer in range(self.n_buyers):
            self.cumulative_buyers_profits[buyer][auction_round] = self.cumulative_buyers_profits[
                buyer, auction_round - 1] + self.buyers_profits[auction_round, buyer]
        for seller in range(self.k_sellers):
            self.cumulative_sellers_profits[seller][auction_round] = self.cumulative_sellers_profits[
                seller, auction_round - 1] + self.sellers_profits[auction_round, seller]

    def start_auction(self):
        """
        Main method of the program, runs the actual simulation
        """
        self.print_factors()
        for auction_round in range(self.r_rounds):
            self.buyers_already_won = self.initialize_buyers_flag()
            self.auctions_history.append([])
            for seller in range(self.k_sellers):
                buyers_bid, item, n_buyer_auction, starting_price, total_bid = self.initialize_auction_parameters(
                    seller)
                for buyer in range(self.n_buyers):
                    if self.buyers_already_won[buyer] and not self.level_commitment_activated:
                        continue
                    n_buyer_auction += 1
                    bid = self.calculate_bid(buyer, item, seller, starting_price, auction_round)
                    buyers_bid[buyer] = bid
                    total_bid += bid

                market_price = total_bid / n_buyer_auction
                winner, price_to_pay = self.choose_winner(buyers_bid, market_price)
                auction = self.store_auction_history(winner=winner,
                                                     price_paid=price_to_pay,
                                                     starting_price=starting_price,
                                                     market_price=market_price,
                                                     bid_history=buyers_bid,
                                                     previous_alphas=self.get_alphas(seller, item),
auction_round=auction_round, item_kind=item) if self.level_commitment_activated and self.buyers_already_won[winner]: # The buyer already won an auction in this round so he has to choose which one to return self.choose_item_to_keep(auction, market_price, price_to_pay, winner, seller, auction_round) self.market_price[auction_round, seller] = market_price new_alphas = self.update_alphas(winner, seller, item, buyers_bid) auction.set_new_alphas(new_alphas) self.buyers_already_won[winner] = True self.update_profits(auction_round) self.print_round(auction_round) def store_auction_history(self, starting_price, market_price, winner, price_paid, bid_history, previous_alphas, auction_round, item_kind): """ Store the information of an auction in an auction object and store it in the auctions history :param starting_price: Starting price of the auction :param market_price: market price of the item :param winner: id of the buyer that wins the auction :param price_paid: price that the buyer pays for the item :param bid_history: dictionary with the bid of the buyers :param previous_alphas: bidding factor before the auction :param auction_round: round that this auction took place in :param item_kind: kind of item that is sold :return: auction object with all the information """ auction = Auction(starting_price, market_price, price_paid, winner, bid_history, previous_alphas, item_kind) self.auctions_history[auction_round].append(auction) return auction def plot_statistics(self): """ Plot the statistics of the history of the prices, the profit of the buyers and the sellers \ """ market_prices = np.zeros((self.r_rounds, self.k_sellers)) for n, auctions_round in enumerate(self.auctions_history): for seller in range(self.k_sellers): market_prices[n, seller] = auctions_round[seller].market_price # Plot price history for seller in range(self.k_sellers): if self.bidding_factor_strategy[0] == 1: plt.semilogy(market_prices[:, seller], label="Seller " + str(seller)) else: plt.plot(market_prices[:, seller], label="Seller " + str(seller)) plt.title('Price history across all rounds for each seller') plt.ylabel('Price') plt.xlabel('Auctions') plt.legend() if self.r_rounds < 10: plt.xticks(range(self.r_rounds)) # Plot seller profits plt.figure() for seller in range(self.k_sellers): if self.bidding_factor_strategy[0] == 1: plt.semilogy(self.cumulative_sellers_profits[seller], label="Seller " + str(seller)) else: plt.plot(self.cumulative_sellers_profits[seller], label="Seller " + str(seller)) plt.title('Seller cumulative profits across all auctions') plt.ylabel('Seller profits') plt.xlabel('Rounds') plt.legend() if self.r_rounds < 10: plt.xticks(range(self.r_rounds)) # Plot Buyers profits plt.figure() for buyer in range(self.n_buyers): if self.bidding_factor_strategy[0] == 1: plt.semilogy(self.cumulative_buyers_profits[buyer], label="Buyer " + str(buyer)) else: plt.plot(self.cumulative_buyers_profits[buyer], label="Buyer " + str(buyer)) plt.title('Buyer cumulative profits across all auctions') plt.ylabel('Buyer profits') plt.xlabel('Rounds') plt.legend() if self.r_rounds < 10: plt.xticks(range(self.r_rounds)) plt.show() if __name__ == '__main__': buyers = 10 strategy = [1 for n in range(buyers)] # strategy[0] = 4 auctioneer = Auctioneer(0.1, bidding_factor_strategy=strategy, M_types=3, K_sellers=4, N_buyers=buyers, R_rounds=100, level_comm_flag=False, use_seller=False, debug=True) auctioneer.start_auction() auctioneer.plot_statistics() print("\nBidding factors when the simulation is finished") auctioneer.print_alphas()
45.26691
119
0.619482
import random import matplotlib.pyplot as plt import numpy as np from prettytable import PrettyTable from auction import Auction class Auctioneer: def __init__(self, penalty_factor=0.1, bidding_factor_strategy=[], use_seller=True, starting_prices=[], M_types=3, K_sellers=4, N_buyers=10, R_rounds=3, level_comm_flag=False, debug=True, universal_maximum_price=100): """ :param penalty_factor: Multiplier for fee calculationz :param bidding_factor_strategy: Array with the bidding factor strategy of each buyer :param use_seller: Flag to use seller or item as second dimension for alpha :param starting_prices: Debug purposes, starting prices can be forced this way. :param M_types: Number of types of items :param K_sellers: Number of sellers :param N_buyers: Number of buyers :param R_rounds: Number of rounds :param level_comm_flag: Flag to say if level commitment is allowed or not :param debug: Flag for debug prints :param universal_maximum_price: Max initial starting price """ self.debug = debug if len(bidding_factor_strategy) == 0: # If the strategy is not passed, it is set to default 0 # bidding_factor_strategy = [np.random.randint(2, 4, 1) for n in range(N_buyers)] bidding_factor_strategy = [2 for n in range(N_buyers)] else: for s in bidding_factor_strategy: if s not in [1, 2, 3, 4]: print("Error in the strategy input") return self.m_item_types = range(M_types) self.k_sellers = K_sellers self.n_buyers = N_buyers self.r_rounds = R_rounds self.max_starting_price = universal_maximum_price self.penalty_factor = penalty_factor # If level commitment is activated sellers cannot cancel a won auction self.level_commitment_activated = level_comm_flag self.buyers_already_won = self.initialize_buyers_flag() self.auctions_history = [] # Assign a type of item to each seller randomly self.sellers_types = [random.sample(self.m_item_types, 1)[0] for seller in range(self.k_sellers)] # Assign second dimension of alpha following the input flag if use_seller: self.second_dimension = self.k_sellers else: self.second_dimension = M_types self.bidding_factor_strategy = bidding_factor_strategy self.bidding_factor = self.calculate_bidding_factor() self.increase_bidding_factor = np.random.uniform(1, 1.5, size=self.n_buyers) self.decrease_bidding_factor = np.random.uniform(0.3, 0.8, size=self.n_buyers) # Ceiling threshold for strategy 2 self.ceiling = 2 self.market_price = np.zeros((self.r_rounds, self.k_sellers)) self.buyers_profits = np.zeros((self.r_rounds, self.n_buyers)) self.cumulative_buyers_profits = np.zeros((self.n_buyers, self.r_rounds)) self.cumulative_sellers_profits = np.zeros((self.k_sellers, self.r_rounds)) self.sellers_profits = np.zeros((self.r_rounds, self.k_sellers)) self.starting_prices = self.calculate_starting_prices(starting_prices) self.print_alphas() self.times_items_returned = 0 self.times_bad_trade = 0 def calculate_bid(self, buyer_id, item_type, seller_id, starting_price, auction_round): """ Calculate the bid for a specific buyer considering his bidding strategy :param buyer_id: id of the buyer to calculate the bid from :param item_type: kind of item that is being auction :param seller_id: id of the seller that is auctioning :param starting_price: starting price of the item that is being auctioned :param auction_round: round of the auction :return: bid of the buyer """ second_dimension = seller_id if self.second_dimension == len(self.m_item_types): second_dimension = item_type bid = self.bidding_factor[buyer_id][second_dimension] * starting_price if not self.level_commitment_activated \ or not 
self.buyers_already_won[buyer_id]: # If the buyer flag is not ON it means the buyer hasn't win an auction in this round yet return bid auction, seller = self.get_auction_with_winner(buyer_id, auction_round) previous_profit, market_price = auction.winner_profit, auction.market_price penalty = self.calculate_fee(market_price - previous_profit) return max(bid, starting_price + previous_profit + penalty) def calculate_bidding_factor(self): """ Bidding factor strategies: 1 - When an auction is won, the bidding factor is multiplied by the increasing factor and when lost by the decreasing factor 2 - Depends on the kind of item, but has a max value to avoid price explosion. If alpha bigger than 2, decrease it using decrease factor. 3 - Depends on the kind of item, if the bid is higher than market price, bidding factor is multiplied by the decreasing factor while if it is lower multiply by the increasing factor. """ bidding_factor = [] for buyer in range(self.n_buyers): bidding_factor.append( np.random.uniform(1, 2, self.second_dimension) ) return bidding_factor def calculate_starting_prices(self, starting_prices): """ Calculate the starting prices of the sellers. If the input parameter is empty they will be empty otherwise they will be the same as the input parameter, this is only for debug purposes. :param starting_prices: DEBUG purposes. Set with the desired initial prices. If empty calculate them randomly. :return: the starting prices for the auctions """ if len(starting_prices) > 0: return starting_prices prices = [] for seller in range(self.k_sellers): prices.append(random.random() * self.max_starting_price) return prices def calculate_fee(self, price_paid): # Calculate the fee to pay for an item if it is cancelled return self.penalty_factor * price_paid def choose_item_to_keep(self, auction, market_price, price_to_pay, winner, seller, auction_round): """ When an buyers wins a second item in a round one of the items has to be returned. The agent is rational and therefore will always keep the item with higher return considering the fee to pay for the returned item. :param auction: auction object with the information of the auction that made the buyer win the new item :param market_price: market price of the item just won :param price_to_pay: price paid for the new item :param winner: id of the buyer :param seller: id of the seller :param auction_round: round of the auction """ self.times_items_returned += 1 previous_auction, previous_seller = self.get_auction_with_winner(winner, auction_round) previous_winner_profit = previous_auction.winner_profit previous_fee = self.calculate_fee(previous_auction.price_paid) new_profit = market_price - price_to_pay new_fee = self.calculate_fee(price_to_pay) if new_profit - previous_fee > previous_winner_profit - new_fee: # It is profitable to keep the new item, pay fee to previous seller previous_auction.return_item(previous_fee, kept_item_profit=new_profit, kept_item_fee=new_fee, seller_item_kept=seller, kept_item_price=price_to_pay) if new_profit - previous_fee < 0: self.times_bad_trade += 1 else: auction.return_item(new_fee, kept_item_profit=previous_winner_profit, kept_item_fee=previous_fee, seller_item_kept=previous_seller, kept_item_price=previous_auction.price_paid) if previous_winner_profit - new_fee < 0: self.times_bad_trade += 1 def choose_winner(self, bids, market_price): """ Chooose the winner of an auction. :param bids: map with the bids made by the buyers. 
Key is the id of the buyer and Value the bid :param market_price: market price of the item to sell :return: id of the buyer that wins the item, price to pay by the winner """ valid_bids = [] for bid in bids.values(): if bid > market_price: continue valid_bids.append(bid) if len(valid_bids) == 0: valid_bids.append(next(iter(bids.values()))) valid_bids = sorted(valid_bids, reverse=True) winner_id = [key for key in bids.keys() if bids[key] == valid_bids[0]][0] try: price_to_pay = valid_bids[1] except IndexError: price_to_pay = valid_bids[0] return winner_id, price_to_pay def get_alphas(self, seller, item): """ Get the bidding factors :param seller: id of the seller :param item: kind of item :return: bidding factors """ second_dimension = seller if self.second_dimension == len(self.m_item_types): second_dimension = item alphas = [] for buyer in range(self.n_buyers): alphas.append(self.bidding_factor[buyer][second_dimension]) return alphas def get_auction_with_winner(self, winner, auction_round): """ Retrieve the auction object of a previous auction with the winner. Used when level commitment is activated and a buyer wins a second time. :param winner: id of the winner :param auction_round: round of the auction :return: auction object, seller id of the auction """ seller = 0 for auction in self.auctions_history[auction_round]: if winner == auction.winner: return auction, seller seller += 1 assert 0 == 1 def initialize_auction_parameters(self, seller): # Initialize all the parameters needed for an auction starting_price = self.starting_prices[seller] n_buyer_auction = 0 total_bid = 0 buyers_bid = {} item = self.sellers_types[seller] return buyers_bid, item, n_buyer_auction, starting_price, total_bid def initialize_buyers_flag(self): # Initialize the list with the flags that indicates if a buyer has already won an auction in the round return [False for buyer in range(self.n_buyers)] def print_alphas(self, extra_debug=False): """ Print the values of the bidding factors. :param extra_debug: Even if in the parent object debug is set to false, it is possible that this printing is required. With this input parameter this is possible. """ if not self.debug and not extra_debug: return buyer = 0 alphas_table = PrettyTable() if self.second_dimension == self.k_sellers: alphas_table.field_names = ["S-0"] + ["S" + str(seller) for seller in range(self.k_sellers)] elif self.second_dimension == len(self.m_item_types): alphas_table.field_names = ["S-1"] + ["Type " + str(item_type) for item_type in self.m_item_types] for strategy in self.bidding_factor_strategy: alphas_table.add_row(["B" + str(buyer)] + ['%.2f' % elem for elem in self.bidding_factor[buyer]]) str_0 = True buyer += 1 print(alphas_table) def print_factors(self, extra_debug=False): """ Print the increasing and decreasing factors for every buyer. :param extra_debug: Even if in the parent object debug is set to false, it is possible that this printing is required. With this input parameter this is possible. 
""" if not self.debug and not extra_debug: return initial_table = PrettyTable() initial_table.field_names = [""] + ["B" + str(buyer) for buyer in range(self.n_buyers)] initial_table.add_row(["Increasing factor"] + ['%.2f' % elem for elem in self.increase_bidding_factor]) initial_table.add_row(["Decreasing factor"] + ['%.2f' % elem for elem in self.decrease_bidding_factor]) print(initial_table) def print_round(self, round_number, extra_debug=False): """ Print the information of all the auctions in a round :param round_number: round of auction :param extra_debug: Even if in the parent object debug is set to false, it is possible that this printing is required. With this input parameter this is possible. \ """ if not self.debug and not extra_debug: return print() print("Round", round_number, "history") seller = 0 for auction in self.auctions_history[round_number]: auction.print_auction(seller) seller += 1 print() print("------------------------------------------------------") def update_alphas(self, winner, seller, item, bids): """ Update the bidding factor depending on the strategies of each buyer :param winner: id of the winner of the auction :param seller: seller of the item of the auction :param item: kind of items that the seller auctions :param bids: dictionary with the bids of the buyers, key is the id of the buyer and the value is the bid :return: new alphas after updating """ second_dimension = seller if self.second_dimension == len(self.m_item_types): second_dimension = item new_alphas = [] for buyer in range(self.n_buyers): if self.bidding_factor_strategy[buyer] == 1: if buyer == winner: self.bidding_factor[buyer][second_dimension] *= self.decrease_bidding_factor[buyer] elif self.buyers_already_won[buyer] and not self.level_commitment_activated: self.bidding_factor[buyer][second_dimension] = self.bidding_factor[buyer][second_dimension] else: self.bidding_factor[buyer][second_dimension] *= self.increase_bidding_factor[buyer] new_alphas.append(self.bidding_factor[buyer][second_dimension]) # Strategy 2 - Depends on the kind of item, but has a max value to avoid price explosion. # If alpha bigger than ceiling, decrease it using decrease factor. 
elif self.bidding_factor_strategy[buyer] == 2: # if buyer == winner: # Do not update if buyer != winner and self.bidding_factor[buyer][second_dimension] < self.ceiling: self.bidding_factor[buyer][second_dimension] *= self.increase_bidding_factor[buyer] elif self.buyers_already_won[buyer] and not self.level_commitment_activated: continue elif buyer != winner and self.bidding_factor[buyer][second_dimension] > self.ceiling: self.bidding_factor[buyer][second_dimension] *= self.decrease_bidding_factor[buyer] new_alphas.append(self.bidding_factor[buyer][second_dimension]) # Strategy 3 - Depends on the kind of item, but checks the market price # to see if previous alpha update was helpful or not elif self.bidding_factor_strategy[buyer] == 3: if buyer == winner: self.bidding_factor[buyer][second_dimension] *= self.decrease_bidding_factor[buyer] elif self.buyers_already_won[buyer] and not self.level_commitment_activated: self.bidding_factor[buyer][second_dimension] = self.bidding_factor[buyer][second_dimension] else: if bids[buyer] > np.mean(list(bids.values())): self.bidding_factor[buyer][second_dimension] *= self.decrease_bidding_factor[buyer] else: self.bidding_factor[buyer][second_dimension] *= self.increase_bidding_factor[buyer] new_alphas.append(self.bidding_factor[buyer][second_dimension]) # Strategy 4 - Fully random each time # to see if previous alpha update was helpful or not elif self.bidding_factor_strategy[buyer] == 4: self.bidding_factor[buyer][second_dimension] = np.random.uniform(1, 2) new_alphas.append(self.bidding_factor[buyer][second_dimension]) # If the bidding factor is less than 1, replace it with the increasing factor if self.bidding_factor[buyer][second_dimension] < 1: self.bidding_factor[buyer][second_dimension] = self.increase_bidding_factor[buyer] return new_alphas def update_profits(self, auction_round): """ Update the profit of every buyer and seller after a round is finished :param auction_round: number of round """ seller = 0 for auction in self.auctions_history[auction_round]: self.buyers_profits[auction_round, auction.winner] += auction.winner_profit self.sellers_profits[auction_round, seller] += auction.seller_profit seller += 1 for buyer in range(self.n_buyers): self.cumulative_buyers_profits[buyer][auction_round] = self.cumulative_buyers_profits[ buyer, auction_round - 1] + self.buyers_profits[ auction_round, buyer] for seller in range(self.k_sellers): self.cumulative_sellers_profits[seller][auction_round] = self.cumulative_sellers_profits[ seller, auction_round - 1] + \ self.sellers_profits[auction_round, seller] def start_auction(self): """ Main method of the program, runs the actual simulation """ self.print_factors() for auction_round in range(self.r_rounds): self.buyers_already_won = self.initialize_buyers_flag() self.auctions_history.append([]) for seller in range(self.k_sellers): buyers_bid, item, n_buyer_auction, starting_price, total_bid = self.initialize_auction_parameters( seller) for buyer in range(self.n_buyers): if self.buyers_already_won[buyer] and not self.level_commitment_activated: continue n_buyer_auction += 1 bid = self.calculate_bid(buyer, item, seller, starting_price, auction_round) buyers_bid[buyer] = bid total_bid += bid market_price = total_bid / n_buyer_auction winner, price_to_pay = self.choose_winner(buyers_bid, market_price) auction = self.store_auction_history(winner=winner, price_paid=price_to_pay, starting_price=starting_price, market_price=market_price, bid_history=buyers_bid, previous_alphas=self.get_alphas(seller, item), 
auction_round=auction_round, item_kind=item) if self.level_commitment_activated and self.buyers_already_won[winner]: # The buyer already won an auction in this round so he has to choose which one to return self.choose_item_to_keep(auction, market_price, price_to_pay, winner, seller, auction_round) self.market_price[auction_round, seller] = market_price new_alphas = self.update_alphas(winner, seller, item, buyers_bid) auction.set_new_alphas(new_alphas) self.buyers_already_won[winner] = True self.update_profits(auction_round) self.print_round(auction_round) def store_auction_history(self, starting_price, market_price, winner, price_paid, bid_history, previous_alphas, auction_round, item_kind): """ Store the information of an auction in an auction object and store it in the auctions history :param starting_price: Starting price of the auction :param market_price: market price of the item :param winner: id of the buyer that wins the auction :param price_paid: price that the buyer pays for the item :param bid_history: dictionary with the bid of the buyers :param previous_alphas: bidding factor before the auction :param auction_round: round that this auction took place in :param item_kind: kind of item that is sold :return: auction object with all the information """ auction = Auction(starting_price, market_price, price_paid, winner, bid_history, previous_alphas, item_kind) self.auctions_history[auction_round].append(auction) return auction def plot_statistics(self): """ Plot the statistics of the history of the prices, the profit of the buyers and the sellers \ """ market_prices = np.zeros((self.r_rounds, self.k_sellers)) for n, auctions_round in enumerate(self.auctions_history): for seller in range(self.k_sellers): market_prices[n, seller] = auctions_round[seller].market_price # Plot price history for seller in range(self.k_sellers): if self.bidding_factor_strategy[0] == 1: plt.semilogy(market_prices[:, seller], label="Seller " + str(seller)) else: plt.plot(market_prices[:, seller], label="Seller " + str(seller)) plt.title('Price history across all rounds for each seller') plt.ylabel('Price') plt.xlabel('Auctions') plt.legend() if self.r_rounds < 10: plt.xticks(range(self.r_rounds)) # Plot seller profits plt.figure() for seller in range(self.k_sellers): if self.bidding_factor_strategy[0] == 1: plt.semilogy(self.cumulative_sellers_profits[seller], label="Seller " + str(seller)) else: plt.plot(self.cumulative_sellers_profits[seller], label="Seller " + str(seller)) plt.title('Seller cumulative profits across all auctions') plt.ylabel('Seller profits') plt.xlabel('Rounds') plt.legend() if self.r_rounds < 10: plt.xticks(range(self.r_rounds)) # Plot Buyers profits plt.figure() for buyer in range(self.n_buyers): if self.bidding_factor_strategy[0] == 1: plt.semilogy(self.cumulative_buyers_profits[buyer], label="Buyer " + str(buyer)) else: plt.plot(self.cumulative_buyers_profits[buyer], label="Buyer " + str(buyer)) plt.title('Buyer cumulative profits across all auctions') plt.ylabel('Buyer profits') plt.xlabel('Rounds') plt.legend() if self.r_rounds < 10: plt.xticks(range(self.r_rounds)) plt.show() if __name__ == '__main__': buyers = 10 strategy = [1 for n in range(buyers)] # strategy[0] = 4 auctioneer = Auctioneer(0.1, bidding_factor_strategy=strategy, M_types=3, K_sellers=4, N_buyers=buyers, R_rounds=100, level_comm_flag=False, use_seller=False, debug=True) auctioneer.start_auction() auctioneer.plot_statistics() print("\nBidding factors when the simulation is finished") auctioneer.print_alphas()
0
0
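Stripped of the class plumbing, the winner-selection rule implemented in choose_winner above is: the market price is the mean of all bids, bids above it are discarded, the highest surviving bid wins, and the winner pays the second-highest surviving bid (or their own bid if it is the only one left). A self-contained restatement of just that rule:

def run_auction(bids):
    # bids: dict buyer_id -> bid. Returns (winner_id, price_to_pay, market_price).
    market_price = sum(bids.values()) / len(bids)
    valid = sorted((b for b in bids.values() if b <= market_price), reverse=True)
    if not valid:  # every buyer overbid the market price; fall back to any bid
        valid = [next(iter(bids.values()))]
    winner_id = next(k for k, v in bids.items() if v == valid[0])
    price_to_pay = valid[1] if len(valid) > 1 else valid[0]
    return winner_id, price_to_pay, market_price

# e.g. run_auction({0: 12.0, 1: 9.0, 2: 30.0}) -> (0, 9.0, 17.0):
# buyer 2's bid exceeds the mean of 17.0 and is discarded, buyer 0 wins
# with 12.0 and pays the second-highest surviving bid, 9.0.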
bc8ce23b2de1c59918fb8dc6dfc87ea85a63c990
2,733
py
Python
atc/LINE-master/train.py
anaeliaovalle/atc-mt-dti
755bd175e852ef2a6792be7244b006ebed252d8d
[ "MIT" ]
null
null
null
atc/LINE-master/train.py
anaeliaovalle/atc-mt-dti
755bd175e852ef2a6792be7244b006ebed252d8d
[ "MIT" ]
null
null
null
atc/LINE-master/train.py
anaeliaovalle/atc-mt-dti
755bd175e852ef2a6792be7244b006ebed252d8d
[ "MIT" ]
null
null
null
import argparse from utils.utils import * from utils.line import Line from tqdm import trange import torch import torch.optim as optim import sys import pickle if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("-g", "--graph_path", type=str) parser.add_argument("-save", "--save_path", type=str) parser.add_argument("-lossdata", "--lossdata_path", type=str) # Hyperparams. parser.add_argument("-order", "--order", type=int, default=2) parser.add_argument("-neg", "--negsamplesize", type=int, default=5) parser.add_argument("-dim", "--dimension", type=int, default=128) parser.add_argument("-batchsize", "--batchsize", type=int, default=5) parser.add_argument("-epochs", "--epochs", type=int, default=1) parser.add_argument("-lr", "--learning_rate", type=float, default=0.025) # As starting value in paper parser.add_argument("-negpow", "--negativepower", type=float, default=0.75) args = parser.parse_args() # Create dict of distribution when opening file edgedistdict, nodedistdict, weights, nodedegrees, maxindex = makeDist( args.graph_path, args.negativepower) edgesaliassampler = VoseAlias(edgedistdict) nodesaliassampler = VoseAlias(nodedistdict) batchrange = int(len(edgedistdict) / args.batchsize) print(maxindex) line = Line(maxindex + 1, embed_dim=args.dimension, order=args.order) opt = optim.SGD(line.parameters(), lr=args.learning_rate, momentum=0.9, nesterov=True) device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") lossdata = {"it": [], "loss": []} it = 0 print("\nTraining on {}...\n".format(device)) for epoch in range(args.epochs): print("Epoch {}".format(epoch)) for b in trange(batchrange): samplededges = edgesaliassampler.sample_n(args.batchsize) batch = list(makeData(samplededges, args.negsamplesize, weights, nodedegrees, nodesaliassampler)) batch = torch.LongTensor(batch) v_i = batch[:, 0] v_j = batch[:, 1] negsamples = batch[:, 2:] line.zero_grad() loss = line(v_i, v_j, negsamples, device) loss.backward() opt.step() lossdata["loss"].append(loss.item()) lossdata["it"].append(it) it += 1 print("\nDone training, saving model to {}".format(args.save_path)) torch.save(line, "{}".format(args.save_path)) print("Saving loss data at {}".format(args.lossdata_path)) with open(args.lossdata_path, "wb") as ldata: pickle.dump(lossdata, ldata) sys.exit()
36.932432
89
0.631906
import argparse from utils.utils import * from utils.line import Line from tqdm import trange import torch import torch.optim as optim import sys import pickle if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument("-g", "--graph_path", type=str) parser.add_argument("-save", "--save_path", type=str) parser.add_argument("-lossdata", "--lossdata_path", type=str) # Hyperparams. parser.add_argument("-order", "--order", type=int, default=2) parser.add_argument("-neg", "--negsamplesize", type=int, default=5) parser.add_argument("-dim", "--dimension", type=int, default=128) parser.add_argument("-batchsize", "--batchsize", type=int, default=5) parser.add_argument("-epochs", "--epochs", type=int, default=1) parser.add_argument("-lr", "--learning_rate", type=float, default=0.025) # As starting value in paper parser.add_argument("-negpow", "--negativepower", type=float, default=0.75) args = parser.parse_args() # Create dict of distribution when opening file edgedistdict, nodedistdict, weights, nodedegrees, maxindex = makeDist( args.graph_path, args.negativepower) edgesaliassampler = VoseAlias(edgedistdict) nodesaliassampler = VoseAlias(nodedistdict) batchrange = int(len(edgedistdict) / args.batchsize) print(maxindex) line = Line(maxindex + 1, embed_dim=args.dimension, order=args.order) opt = optim.SGD(line.parameters(), lr=args.learning_rate, momentum=0.9, nesterov=True) device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") lossdata = {"it": [], "loss": []} it = 0 print("\nTraining on {}...\n".format(device)) for epoch in range(args.epochs): print("Epoch {}".format(epoch)) for b in trange(batchrange): samplededges = edgesaliassampler.sample_n(args.batchsize) batch = list(makeData(samplededges, args.negsamplesize, weights, nodedegrees, nodesaliassampler)) batch = torch.LongTensor(batch) v_i = batch[:, 0] v_j = batch[:, 1] negsamples = batch[:, 2:] line.zero_grad() loss = line(v_i, v_j, negsamples, device) loss.backward() opt.step() lossdata["loss"].append(loss.item()) lossdata["it"].append(it) it += 1 print("\nDone training, saving model to {}".format(args.save_path)) torch.save(line, "{}".format(args.save_path)) print("Saving loss data at {}".format(args.lossdata_path)) with open(args.lossdata_path, "wb") as ldata: pickle.dump(lossdata, ldata) sys.exit()
0
0
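A quick sketch of consuming the script's outputs; the train.py name and both output paths are assumptions, and loading a fully pickled module needs utils.line on the import path (plus weights_only=False on recent PyTorch):

# python train.py -g graph.txt -save line.pt -lossdata loss.pkl -epochs 5
import pickle
import torch

model = torch.load("line.pt", weights_only=False)  # the whole Line module was pickled
with open("loss.pkl", "rb") as f:
    lossdata = pickle.load(f)                      # {"it": [...], "loss": [...]}

print(type(model), len(lossdata["loss"]))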
e350a391ac04e2b8526a0c505584efa8bd49131b
13,994
py
Python
sdtv4/SDT4Parser.py
Homegateway/SDTTool
97e698ce3078595a6755ec0b599838dc903eaa3d
[ "Apache-2.0" ]
2
2018-05-14T16:00:23.000Z
2018-12-26T14:02:51.000Z
sdtv4/SDT4Parser.py
Homegateway/SDTTool
97e698ce3078595a6755ec0b599838dc903eaa3d
[ "Apache-2.0" ]
null
null
null
sdtv4/SDT4Parser.py
Homegateway/SDTTool
97e698ce3078595a6755ec0b599838dc903eaa3d
[ "Apache-2.0" ]
2
2016-09-05T09:24:41.000Z
2020-06-23T14:05:45.000Z
# SDT4Parser.py
#
# Callback target class for the ElementTree parser to parse a SDT4

from .SDT4Classes import *

class SDT4Parser:

    # Define the element tags of the SDT4
    actionTag = 'action'
    actionsTag = 'actions'
    argTag = 'arg'
    argsTag = 'args'
    arrayTypeTag = 'array'
    constraintTag = 'constraint'
    constraintsTag = 'constraints'
    dataPointTag = 'datapoint'
    dataTag = 'data'
    dataTypeTag = 'datatype'
    dataTypesTag = 'datatypes'
    deviceClassTag = 'deviceclass'
    deviceClassesTag = 'deviceclasses'
    domainTag = 'domain'
    enumTypeTag = 'enum'
    enumValueTag = 'enumvalue'
    eventTag = 'event'
    eventsTag = 'events'
    excludeTag = 'exclude'
    extendDeviceTag = 'extenddevice'
    extendTag = 'extend'
    importsTag = 'imports'
    includeTag = 'include'
    moduleClassTag = 'moduleclass'
    moduleClassesTag = 'moduleclasses'
    productClassTag = 'productclass'
    productClassesTag = 'productclasses'
    propertiesTag = 'properties'
    propertyTag = 'property'
    simpleTypeTag = 'simple'
    structTypeTag = 'struct'
    subDeviceTag = 'subdevice'
    subDevicesTag = 'subdevices'

    # Document tags
    docTag = 'doc'
    ttTag = 'tt'
    emTag = 'em'
    bTag = 'b'
    pTag = 'p'
    imgTag = 'img'
    imgCaptionTag = 'caption'

    def __init__(self):
        self.elementStack = []
        self.nameSpaces = []
        self.domain = None

    def start(self, tag, attrib):
        # First add the name space to the list of used name spaces
        uri, ignore, otag = tag[1:].partition("}")
        if uri not in self.nameSpaces:
            self.nameSpaces.append(uri)
        ntag = otag.lower()

        # Check non-emptiness of attributes
        for at in attrib:
            if len(attrib[at].strip()) == 0:
                raise SyntaxError('empty attribute: ' + at + ' for element ' + tag)

        # Handle all elements
        # The lastElem always contains the last element on the stack and is
        # used transparently in the code below.
        lastElem = self.elementStack[-1] if len(self.elementStack) > 0 else None

        # Call the handler function for that element tag.
        # First, check whether this is allowed for the current parent, or raise an exception
        if ntag in handlers:
            (func, instances) = handlers[ntag]
            if instances is None or isinstance(lastElem, instances):
                func(attrib, lastElem, self.elementStack)
            else:
                raise SyntaxError('%s definition is only allowed in %s elements' % (otag, [v._name for v in instances]))

        # Other tags to ignore / just containers
        elif ntag in (SDT4Parser.actionsTag,
                      SDT4Parser.argsTag,
                      SDT4Parser.constraintsTag,
                      SDT4Parser.dataTag,
                      SDT4Parser.dataTypesTag,
                      SDT4Parser.deviceClassesTag,
                      SDT4Parser.eventsTag,
                      SDT4Parser.extendDeviceTag,
                      SDT4Parser.importsTag,
                      SDT4Parser.moduleClassesTag,
                      SDT4Parser.productClassesTag,
                      SDT4Parser.propertiesTag,
                      SDT4Parser.subDevicesTag):
            pass

        # Encountered an unknown element
        else:
            raise SyntaxError('Unknown Element: %s %s' % (tag, attrib))

    def end(self, tag):
        uri, ignore, ntag = tag[1:].partition("}")
        ntag = ntag.lower()
        if ntag == SDT4Parser.domainTag:
            self.domain = self.elementStack.pop()  # Assign the domain to the parser as result
        elif ntag in (SDT4Parser.actionTag,
                      SDT4Parser.argTag,
                      SDT4Parser.arrayTypeTag,
                      SDT4Parser.bTag,
                      SDT4Parser.constraintTag,
                      SDT4Parser.eventTag,
                      SDT4Parser.deviceClassTag,
                      SDT4Parser.dataPointTag,
                      SDT4Parser.dataTypeTag,
                      SDT4Parser.docTag,
                      SDT4Parser.emTag,
                      SDT4Parser.enumTypeTag,
                      SDT4Parser.enumValueTag,
                      SDT4Parser.extendTag,
                      SDT4Parser.imgTag,
                      SDT4Parser.imgCaptionTag,
                      SDT4Parser.moduleClassTag,
                      SDT4Parser.pTag,
                      SDT4Parser.productClassTag,
                      SDT4Parser.propertyTag,
                      SDT4Parser.simpleTypeTag,
                      SDT4Parser.structTypeTag,
                      SDT4Parser.subDeviceTag,
                      SDT4Parser.ttTag):
            obj = self.elementStack.pop()
            obj.endElement()
        else:
            # ignore others
            pass

    def data(self, data):
        if len(self.elementStack) < 1:
            return
        if isinstance(self.elementStack[-1], SDT4Doc):
            obj = self.elementStack[-1]
            obj.addContent(' ' + ' '.join(data.split()))
        elif isinstance(self.elementStack[-1], (SDT4DocTT, SDT4DocEM, SDT4DocB, SDT4DocP, SDT4DocIMG, SDT4DocCaption)):
            obj = self.elementStack[-1]
            obj.addContent(' '.join(data.split()))

    def close(self):  # ignore end of file
        pass

    def comment(self, data):  # ignore comments
        pass


def getAttribute(attrib, attribName):
    return attrib[attribName].strip() if attribName in attrib else None


#
# Handler for each of the element types
#

def handleAction(attrib, lastElem, elementStack):
    action = SDT4Action()
    action.name = getAttribute(attrib, 'name')
    action.optional = getAttribute(attrib, 'optional')
    action.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.actions.append(action)
    elementStack.append(action)


def handleArg(attrib, lastElem, elementStack):
    arg = SDT4Arg()
    arg.name = getAttribute(attrib, 'name')
    arg.optional = getAttribute(attrib, 'optional')
    arg.default = getAttribute(attrib, 'default')
    arg.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.args.append(arg)
    elementStack.append(arg)


def handleArrayType(attrib, lastElem, elementStack):
    arrayType = SDT4ArrayType()
    lastElem.type = arrayType
    elementStack.append(arrayType)


def handleB(attrib, lastElem, elementStack):
    b = SDT4DocB()
    b.doc = lastElem.doc
    elementStack.append(b)


def handleConstraint(attrib, lastElem, elementStack):
    constraint = SDT4Constraint()
    constraint.name = getAttribute(attrib, 'name')
    constraint.type = getAttribute(attrib, 'type')
    constraint.value = getAttribute(attrib, 'value')
    constraint.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.constraints.append(constraint)
    elementStack.append(constraint)


def handleDataPoint(attrib, lastElem, elementStack):
    dataPoint = SDT4DataPoint()
    dataPoint.name = getAttribute(attrib, 'name')
    dataPoint.optional = getAttribute(attrib, 'optional')
    dataPoint.writable = getAttribute(attrib, 'writable')
    dataPoint.readable = getAttribute(attrib, 'readable')
    dataPoint.eventable = getAttribute(attrib, 'eventable')
    dataPoint.default = getAttribute(attrib, 'default')
    dataPoint.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.data.append(dataPoint)
    elementStack.append(dataPoint)


def handleDataType(attrib, lastElem, elementStack):
    dataType = SDT4DataType()
    dataType.name = getAttribute(attrib, 'name')
    dataType.unitOfMeasure = getAttribute(attrib, 'unitOfMeasure')
    dataType.semanticURI = getAttribute(attrib, 'semanticURI')
    if isinstance(lastElem, SDT4ArrayType):
        lastElem.arrayType = dataType
    elif isinstance(lastElem, SDT4StructType):
        lastElem.structElements.append(dataType)
    elif isinstance(lastElem, SDT4Domain):  # DataTypes in Domain
        lastElem.dataTypes.append(dataType)
    else:
        lastElem.type = dataType
    elementStack.append(dataType)


def handleDeviceClass(attrib, lastElem, elementStack):
    device = SDT4DeviceClass()
    device.id = getAttribute(attrib, 'id')
    device.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.deviceClasses.append(device)
    elementStack.append(device)


def handleDoc(attrib, lastElem, elementStack):
    doc = SDT4Doc()
    lastElem.doc = doc
    elementStack.append(doc)


def handleDomain(attrib, lastElem, elementStack):
    domain = SDT4Domain()
    domain.id = getAttribute(attrib, 'id')
    domain.semanticURI = getAttribute(attrib, 'semanticURI')
    elementStack.append(domain)


def handleEM(attrib, lastElem, elementStack):
    em = SDT4DocEM()
    em.doc = lastElem.doc
    elementStack.append(em)


def handleEnumType(attrib, lastElem, elementStack):
    enumType = SDT4EnumType()
    lastElem.type = enumType
    elementStack.append(enumType)


def handleEnumValue(attrib, lastElem, elementStack):
    value = SDT4EnumValue()
    value.name = getAttribute(attrib, 'name')
    value.value = getAttribute(attrib, 'value')
    value.type = getAttribute(attrib, 'type')
    value.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.enumValues.append(value)
    elementStack.append(value)


def handleEvent(attrib, lastElem, elementStack):
    event = SDT4Event()
    event.name = getAttribute(attrib, 'name')
    event.optional = getAttribute(attrib, 'optional')
    event.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.events.append(event)
    elementStack.append(event)


def handleExtendExclude(attrib, lastElem, elementStack):
    exclude = SDT4ExtendExclude()
    exclude.name = getAttribute(attrib, 'name')
    exclude.type = getAttribute(attrib, 'type')
    lastElem.excludes.append(exclude)


def handleExtend(attrib, lastElem, elementStack):
    extend = SDT4Extend()
    extend.domain = getAttribute(attrib, 'domain')
    extend.entity = getAttribute(attrib, 'entity')
    if isinstance(lastElem, SDT4ProductClass):  # for ProductClass
        lastElem.extendDevice = extend
    else:  # normal extend
        lastElem.extend = extend
    elementStack.append(extend)


def handleImg(attrib, lastElem, elementStack):
    img = SDT4DocIMG()
    img.doc = lastElem.doc
    img.startImage(getAttribute(attrib, 'src'))
    elementStack.append(img)


def handleImgCaption(attrib, lastElem, elementStack):
    caption = SDT4DocCaption()
    caption.doc = lastElem.doc
    elementStack.append(caption)


def handleInclude(attrib, lastElem, elementStack):
    # Unfortunately, there are two "include" element types to handle
    if isinstance(lastElem, SDT4Extend):
        include = SDT4ExtendInclude()
        include.name = getAttribute(attrib, 'name')
        include.type = getAttribute(attrib, 'type')
        lastElem.excludes.append(include)
    else:
        include = SDT4Include()
        include.parse = getAttribute(attrib, 'parse')
        include.href = getAttribute(attrib, 'href')
        lastElem.includes.append(include)


def handleModuleClass(attrib, lastElem, elementStack):
    mc = SDT4ModuleClass()
    mc.name = getAttribute(attrib, 'name')
    mc.semanticURI = getAttribute(attrib, 'semanticURI')
    mc.minOccurs = getAttribute(attrib, 'minOccurs')
    mc.maxOccurs = getAttribute(attrib, 'maxOccurs')
    lastElem.moduleClasses.append(mc)
    elementStack.append(mc)


def handleP(attrib, lastElem, elementStack):
    p = SDT4DocP()
    p.doc = lastElem.doc
    p.startParagraph()
    elementStack.append(p)


def handleProductClass(attrib, lastElem, elementStack):
    product = SDT4ProductClass()
    product.id = getAttribute(attrib, 'name')
    product.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.productClasses.append(product)
    elementStack.append(product)


def handleProperty(attrib, lastElem, elementStack):
    prop = SDT4Property()
    prop.name = getAttribute(attrib, 'name')
    prop.optional = getAttribute(attrib, 'optional')
    prop.value = getAttribute(attrib, 'value')
    prop.semanticURI = getAttribute(attrib, 'semanticURI')
    lastElem.properties.append(prop)
    elementStack.append(prop)


def handleSimpleType(attrib, lastElem, elementStack):
    simpleType = SDT4SimpleType()
    simpleType.type = getAttribute(attrib, 'type')
    lastElem.type = simpleType
    elementStack.append(simpleType)


def handleStructType(attrib, lastElem, elementStack):
    structType = SDT4StructType()
    lastElem.type = structType
    elementStack.append(structType)


def handleSubDevice(attrib, lastElem, elementStack):
    subDevice = SDT4SubDevice()
    subDevice.id = getAttribute(attrib, 'id')
    subDevice.semanticURI = getAttribute(attrib, 'semanticURI')
    subDevice.minOccurs = getAttribute(attrib, 'minOccurs')
    subDevice.maxOccurs = getAttribute(attrib, 'maxOccurs')
    lastElem.subDevices.append(subDevice)
    elementStack.append(subDevice)


def handleTT(attrib, lastElem, elementStack):
    tt = SDT4DocTT()
    tt.doc = lastElem.doc
    elementStack.append(tt)


#
# Assignment of element types and (handlerFunction, (tuple of allowed parents))
#

handlers = {
    SDT4Parser.actionTag: (handleAction, (SDT4ModuleClass,)),
    SDT4Parser.argTag: (handleArg, (SDT4Action,)),
    SDT4Parser.arrayTypeTag: (handleArrayType, (SDT4DataType,)),
    SDT4Parser.bTag: (handleB, (SDT4Doc, SDT4DocP)),
    SDT4Parser.constraintTag: (handleConstraint, (SDT4DataType,)),
    SDT4Parser.dataPointTag: (handleDataPoint, (SDT4Event, SDT4ModuleClass)),
    SDT4Parser.dataTypeTag: (handleDataType, (SDT4Action, SDT4DataPoint, SDT4Event, SDT4Arg,
                                              SDT4StructType, SDT4ArrayType, SDT4Domain)),
    SDT4Parser.deviceClassTag: (handleDeviceClass, (SDT4Domain,)),
    SDT4Parser.docTag: (handleDoc, (SDT4Domain, SDT4ProductClass, SDT4DeviceClass, SDT4SubDevice,
                                    SDT4DataType, SDT4ModuleClass, SDT4Action, SDT4DataPoint,
                                    SDT4Event, SDT4EnumValue, SDT4Arg, SDT4Constraint, SDT4Property)),
    SDT4Parser.domainTag: (handleDomain, None),
    SDT4Parser.emTag: (handleEM, (SDT4Doc, SDT4DocP)),
    SDT4Parser.enumTypeTag: (handleEnumType, (SDT4DataType,)),
    SDT4Parser.enumValueTag: (handleEnumValue, (SDT4EnumType,)),
    SDT4Parser.eventTag: (handleEvent, (SDT4ModuleClass,)),
    SDT4Parser.excludeTag: (handleExtendExclude, (SDT4Extend,)),
    SDT4Parser.extendTag: (handleExtend, (SDT4ModuleClass, SDT4DataType, SDT4ProductClass, SDT4SubDevice)),
    SDT4Parser.imgTag: (handleImg, (SDT4Doc, SDT4DocP)),
    SDT4Parser.imgCaptionTag: (handleImgCaption, (SDT4DocIMG,)),
    SDT4Parser.includeTag: (handleInclude, (SDT4Domain, SDT4Extend)),
    SDT4Parser.moduleClassTag: (handleModuleClass, (SDT4Domain, SDT4ProductClass, SDT4DeviceClass, SDT4SubDevice)),
    SDT4Parser.pTag: (handleP, (SDT4Doc, SDT4DocP)),
    SDT4Parser.productClassTag: (handleProductClass, (SDT4Domain,)),
    SDT4Parser.propertyTag: (handleProperty, (SDT4ProductClass, SDT4DeviceClass, SDT4SubDevice, SDT4ModuleClass)),
    SDT4Parser.simpleTypeTag: (handleSimpleType, (SDT4DataType, SDT4Property)),
    SDT4Parser.structTypeTag: (handleStructType, (SDT4DataType,)),
    SDT4Parser.subDeviceTag: (handleSubDevice, (SDT4DeviceClass, SDT4ProductClass, SDT4Domain)),
    SDT4Parser.ttTag: (handleTT, (SDT4Doc, SDT4DocP))
}
31.376682
223
0.736101
0
0
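The class above is a parser target, not a standalone parser; a minimal sketch of wiring it into the standard library's ElementTree, assuming a namespaced SDT4 document (the tag handling above expects '{uri}tag' names) and a hypothetical input file name:

import xml.etree.ElementTree as ET

target = SDT4Parser()
parser = ET.XMLParser(target=target)

with open("domain.sdt4.xml", "rb") as f:   # hypothetical input file
    parser.feed(f.read())
parser.close()

domain = target.domain   # the parsed SDT4Domain, set when the Domain element closes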
de130291f9918c171aaa53459b784a3e93f4b849
1,698
py
Python
tools/validator.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
8
2019-02-02T01:15:57.000Z
2021-12-23T04:43:46.000Z
tools/validator.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
null
null
null
tools/validator.py
adacker10/showdown
8ceb1ff46d5c33ec3055928d6ad293224446f63c
[ "MIT" ]
6
2020-09-11T13:15:05.000Z
2022-03-18T15:46:35.000Z
from data import dex
import re


class InValidSetError(Exception):
    def __init__(self, message):
        self.message = message


def validate_team(team):
    '''
    team is an array of up to six pokemon sets
    '''
    if len(team) > 6:
        raise InValidSetError("more than 6 pokemon")
    pokemon_names = set()
    for pokemon in team:
        # check if the pokemon is an actual pokemon
        species = re.sub(r'\W+', '', pokemon['species'].lower())
        pokemon_names.add(species)
        if species not in dex.pokedex:
            raise InValidSetError(species + " is not a real pokemon species")
        if len(pokemon['moves']) > 4:
            raise InValidSetError("more than 4 moves")
        for move in pokemon['moves']:
            if move not in dex.simple_learnsets[species]:
                raise InValidSetError(species + " can't learn the move " + move)
        if pokemon['ability'] not in [re.sub(r'\W+', '', ability.lower()) for ability in list(filter(None.__ne__, list(dex.pokedex[species].abilities)))]:
            raise InValidSetError(species + " can't have the ability " + pokemon['ability'])
        for i in range(6):
            if pokemon['evs'][i] > 255 or pokemon['evs'][i] < 0:
                raise InValidSetError("ev value is out of range: " + str(pokemon['evs'][i]))
            if pokemon['ivs'][i] > 31 or pokemon['ivs'][i] < 0:
                raise InValidSetError("iv value is out of range: " + str(pokemon['ivs'][i]))
        if sum(pokemon['evs']) > 510:
            raise InValidSetError("sum of evs is over 510")
    if len(team) != len(pokemon_names):
        raise InValidSetError("cannot have multiple of the same pokemon")
    return True
40.428571
154
0.602473
0
0
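A sketch of the validator in use; the set layout follows the key accesses above, and the concrete species/move/ability strings are assumptions about the dex data:

team = [{
    "species": "Pikachu",
    "moves": ["thunderbolt"],
    "ability": "static",
    "evs": [0, 0, 0, 252, 4, 252],
    "ivs": [31, 31, 31, 31, 31, 31],
}]

try:
    validate_team(team)            # returns True if every set passes all checks
    print("team is legal")
except InValidSetError as e:
    print("rejected:", e.message)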
bceb90c866742318115d3897625ab3cd17dad9ae
1,782
py
Python
abfs/group_data_split.py
rcdilorenzo/abfs
a897d00a4589a9412a9b9e737f8db91df008fc26
[ "MIT" ]
7
2019-03-13T17:22:50.000Z
2022-01-09T09:03:16.000Z
abfs/group_data_split.py
rcdilorenzo/abfs
a897d00a4589a9412a9b9e737f8db91df008fc26
[ "MIT" ]
1
2019-08-01T23:42:09.000Z
2019-08-02T16:14:31.000Z
abfs/group_data_split.py
rcdilorenzo/abfs
a897d00a4589a9412a9b9e737f8db91df008fc26
[ "MIT" ]
2
2020-09-12T06:33:16.000Z
2021-01-01T01:05:48.000Z
from collections import namedtuple as Struct
from sklearn.model_selection import GroupShuffleSplit, ShuffleSplit

DataSplitConfig = Struct('DataSplitConfig', ['validation_size', 'test_size', 'random_seed'])
DEFAULT_SPLIT_CONFIG = DataSplitConfig(0.2, 0.2, 1337)


class GroupDataSplit():
    def __init__(self, df, key, config=DEFAULT_SPLIT_CONFIG):
        self.config = config
        self.key = key
        self._df = df
        self._split_data()

    @property
    def total(self):
        """Total records in the data frame"""
        return len(self._df)

    def train_df(self):
        """Randomized train data frame"""
        return self._train_df.sample(frac=1).reset_index(drop=True)

    @property
    def val_df(self):
        """Validation data frame"""
        return self._val_df

    @property
    def test_df(self):
        """Test data frame"""
        return self._test_df

    @property
    def test_split(self):
        return GroupShuffleSplit(test_size=self.config.test_size,
                                 random_state=self.config.random_seed).split

    @property
    def val_split(self):
        val_size = self.config.validation_size / (1 - self.config.test_size)
        return GroupShuffleSplit(test_size=val_size,
                                 random_state=self.config.random_seed).split

    def _split_data(self):
        rem_indices, test_indices = next(
            self.test_split(self._df, groups=self._df[self.key])
        )

        rem_df = self._df.iloc[rem_indices]
        train_indices, val_indices = next(
            self.val_split(rem_df, groups=rem_df[self.key])
        )

        self._test_df = self._df.iloc[test_indices]
        self._val_df = rem_df.iloc[val_indices]
        self._train_df = rem_df.iloc[train_indices]
30.724138
92
0.640292
0
0
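A toy run of the splitter above with a made-up pandas frame; the point is that rows sharing a group key never straddle the train/val/test boundary. Note that train_df is a method (it reshuffles on every call) while val_df and test_df are properties:

import pandas as pd

df = pd.DataFrame({
    "scene_id": [1, 1, 2, 2, 3, 3, 4, 4, 5, 5],  # grouping key
    "value": range(10),
})

split = GroupDataSplit(df, key="scene_id")
print(split.total, len(split.train_df()), len(split.val_df), len(split.test_df))
# Every scene_id in val_df/test_df is disjoint from those in train_df().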
b1412972007124b927dd10c01b84ccee4179e203
656
py
Python
data_extract_code/sex-ratio.py
jaya-shankar/Human-Development-Prediction
cdc7f2186c49db3506267573b05da6ba03cd5bfd
[ "Unlicense" ]
null
null
null
data_extract_code/sex-ratio.py
jaya-shankar/Human-Development-Prediction
cdc7f2186c49db3506267573b05da6ba03cd5bfd
[ "Unlicense" ]
null
null
null
data_extract_code/sex-ratio.py
jaya-shankar/Human-Development-Prediction
cdc7f2186c49db3506267573b05da6ba03cd5bfd
[ "Unlicense" ]
2
2021-11-01T15:48:16.000Z
2021-12-28T07:48:35.000Z
import csv

file = open("sex-ratio.csv")
csvreader = csv.reader(file)
header = next(csvreader)
mapped = {}
for row in csvreader:
    if row[0] not in mapped:
        mapped[row[0]] = {}
    mapped[row[0]][row[2]] = row[3]

# f = open("converted.csv",'w')
rows = []
for c in mapped:
    row = [c]
    for y in mapped[c]:
        row.append(mapped[c][y])
    rows.append(row)

header = ['country']
for i in range(1950, 2018):
    header.append(str(i))

with open('converted.csv', 'w', encoding='UTF8') as f:
    writer = csv.writer(f)

    # write the header
    writer.writerow(header)

    # write the data
    for row in rows:
        writer.writerow(row)
18.222222
54
0.599085
0
0
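For reference, the same long-to-wide reshape can be sketched in pandas, assuming the source columns are country, code, year, value as the row[0]/row[2]/row[3] indexing above implies:

import pandas as pd

df = pd.read_csv("sex-ratio.csv", header=0,
                 names=["country", "code", "year", "value"])  # assumed layout
wide = df.pivot_table(index="country", columns="year",
                      values="value", aggfunc="first")
wide.to_csv("converted.csv")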
31f4a614a97ec199afd65d00866fe66229803029
525
py
Python
lib/flask_api/compat.py
imtiaz-emu/gcp-flask-test
096f466242aa14941712ab8ea06ac4fb4eaeb993
[ "Apache-2.0" ]
null
null
null
lib/flask_api/compat.py
imtiaz-emu/gcp-flask-test
096f466242aa14941712ab8ea06ac4fb4eaeb993
[ "Apache-2.0" ]
null
null
null
lib/flask_api/compat.py
imtiaz-emu/gcp-flask-test
096f466242aa14941712ab8ea06ac4fb4eaeb993
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import

# Markdown is optional
try:
    import markdown

    def apply_markdown(text):
        """
        Simple wrapper around :func:`markdown.markdown` to set the base level
        of '#' style headers to <h2>.
        """
        extensions = ['headerid(level=2)']
        safe_mode = False
        md = markdown.Markdown(extensions=extensions, safe_mode=safe_mode)
        return md.convert(text)

except ImportError:
    apply_markdown = None
25
77
0.645714
0
0
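The optional-import pattern above leaves apply_markdown as None when the dependency is missing; a minimal caller sketch (render_description is a hypothetical name):

def render_description(text):
    # Use the optional markdown renderer when available, else fall back to plain text.
    if apply_markdown is not None:
        return apply_markdown(text)
    return text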
6bff53885f7d7817bb5989d427b6919245c38e01
4,969
py
Python
src/cfdpy/derivative/finiteDifferenceMethods.py
mkihara/cfdpy
53945ddd87f810e65d4fffe3b68f6bf8c06098c2
[ "MIT" ]
1
2022-03-28T03:07:26.000Z
2022-03-28T03:07:26.000Z
src/cfdpy/derivative/finiteDifferenceMethods.py
mkihara/cfdpy
53945ddd87f810e65d4fffe3b68f6bf8c06098c2
[ "MIT" ]
null
null
null
src/cfdpy/derivative/finiteDifferenceMethods.py
mkihara/cfdpy
53945ddd87f810e65d4fffe3b68f6bf8c06098c2
[ "MIT" ]
null
null
null
"""Finite Difference Methods """ import numpy as np def FDMWeights(M, x0, alpha): """Calculate the weights in finite difference formulas for any order of derivative and to any order of accuracy on onedimensional grids with arbitrary spacing. Args: M (int): Order of derivative x0 (float): Approximations at this point alpha (np.array): x-cordinates. length must be N Attributes: N (int): Order of accuracy, which is equivalent to len(alpha)-1. Returns: np.array: Weights References: Bengt Fornberg, "Generation of Finite Difference Formulas on Arbitrarily Spaced Grids", 1988. """ N = len(alpha) - 1 delta = np.zeros([M+1,N+1,N+1]) delta[0,0,0] = 1. c1 = 1. for n in range(1, N+1): c2 = 1. for nu in range(n): c3 = alpha[n] - alpha[nu] c2 *= c3 for m in range(min(n, M)+1): delta[m,n,nu] = ((alpha[n]-x0)*delta[m,n-1,nu] - m*delta[m-1,n-1,nu]) / c3 for m in range(min(n, M)+1): delta[m,n,n] = c1/c2 * (m*delta[m-1,n-1,n-1] - (alpha[n-1]-x0)*delta[m,n-1,n-1]) c1 = c2 return delta class centralFDM(object): """Central Finite Difference Method Args: order (int, optional): The order of the accuracy. Defaults to 2. highestDerivative (int, optional): The order of the highest derivative. Defaults to 1. """ def __init__(self, order:int=2, highestDerivative=1): assert (order % 2) == 0, "order must be even number." assert order > 0, "order must be greater than 0." assert highestDerivative > 0, "highestDerivative must be greater than 0." self.order = order self.highestDerivative = highestDerivative self.nGridPoints = ((self.highestDerivative + 1) // 2) * 2 - 1 + self.order self.set_alpha() self.weight = FDMWeights(M=self.highestDerivative, x0=0, alpha=self.alpha)[:,self.order] def __call__(self, f, axis=-1, derivative=1, h=1.): """Calculate the derivative. Args: f (np.array): An array containing samples. axis (int, optional): The derivative is calculated only along the given axis. Defaults to -1. derivative (int, optional): The order of the derivative. Defaults to 1. h (float, optional): The space of the uniform grid. Defaults to 1.. Returns: np.array: The derivative. """ df = np.zeros_like(f) weight_ = self.weight[derivative] alpha_ = self.alpha[weight_!=0] weight_ = weight_[weight_!=0] for i, alpha_i in enumerate(alpha_): df += np.roll(f, shift=-int(alpha_i), axis=axis) * weight_[i] return df / h**derivative def set_alpha(self): alpha_ = np.arange(self.nGridPoints, dtype=float) alpha_ = self.__infiniteSeries(alpha_) self.alpha = np.cumsum(alpha_) def __infiniteSeries(self, n): return n * (-1)**(n-1) class upwindFDM(object): """Upwind Finite Difference Method Args: order (int, optional): The order of the accuracy. Defaults to 1. highestDerivative (int, optional): The order of the highest derivative. Defaults to 1. """ def __init__(self, order:int=1, highestDerivative:int=1): assert order > 0, "order must be greater than 0." assert highestDerivative > 0, "highestDerivative must be greater than 0." self.order = order self.highestDerivative = highestDerivative self.nGridPoints = self.order+self.highestDerivative self.start = - (self.nGridPoints) // 2 self.alpha = np.arange(start=self.start, stop=self.start+self.nGridPoints) self.weight = FDMWeights(M=self.highestDerivative, x0=0., alpha=self.alpha)[:,self.order] self.weight2 = FDMWeights(M=self.highestDerivative, x0=0., alpha=-self.alpha)[:,self.order] def __call__(self, f, axis=-1, derivative=1, h=1., c=None): """Calculate the derivative. Args: f (np.array): An array containing samples. axis (int, optional): The derivative is calculated only along the given axis. 
Defaults to -1. derivative (int, optional): The order of the derivative. Defaults to 1. h (float, optional): The space of the uniform grid. Defaults to 1.. c (float or np.array, optional): The advection speed. Defaults to None. Returns: np.array: The derivative. """ df = np.zeros_like(f) df2 = np.zeros_like(f) for i, alpha_i in enumerate(self.alpha): df += np.roll(f, shift=-int(alpha_i), axis=axis) * self.weight[derivative,i] df2 += np.roll(f, shift=int(alpha_i), axis=axis) * self.weight2[derivative,i] if c == None: c = f df = np.where(c>=0, df, df2) return df / h**derivative
38.51938
105
0.603944
"""Finite Difference Methods """ import numpy as np def FDMWeights(M, x0, alpha): """Calculate the weights in finite difference formulas for any order of derivative and to any order of accuracy on onedimensional grids with arbitrary spacing. Args: M (int): Order of derivative x0 (float): Approximations at this point alpha (np.array): x-cordinates. length must be N Attributes: N (int): Order of accuracy, which is equivalent to len(alpha)-1. Returns: np.array: Weights References: Bengt Fornberg, "Generation of Finite Difference Formulas on Arbitrarily Spaced Grids", 1988. """ N = len(alpha) - 1 delta = np.zeros([M+1,N+1,N+1]) delta[0,0,0] = 1. c1 = 1. for n in range(1, N+1): c2 = 1. for nu in range(n): c3 = alpha[n] - alpha[nu] c2 *= c3 for m in range(min(n, M)+1): delta[m,n,nu] = ((alpha[n]-x0)*delta[m,n-1,nu] - m*delta[m-1,n-1,nu]) / c3 for m in range(min(n, M)+1): delta[m,n,n] = c1/c2 * (m*delta[m-1,n-1,n-1] - (alpha[n-1]-x0)*delta[m,n-1,n-1]) c1 = c2 return delta class centralFDM(object): """Central Finite Difference Method Args: order (int, optional): The order of the accuracy. Defaults to 2. highestDerivative (int, optional): The order of the highest derivative. Defaults to 1. """ def __init__(self, order:int=2, highestDerivative=1): assert (order % 2) == 0, "order must be even number." assert order > 0, "order must be greater than 0." assert highestDerivative > 0, "highestDerivative must be greater than 0." self.order = order self.highestDerivative = highestDerivative self.nGridPoints = ((self.highestDerivative + 1) // 2) * 2 - 1 + self.order self.set_alpha() self.weight = FDMWeights(M=self.highestDerivative, x0=0, alpha=self.alpha)[:,self.order] def __call__(self, f, axis=-1, derivative=1, h=1.): """Calculate the derivative. Args: f (np.array): An array containing samples. axis (int, optional): The derivative is calculated only along the given axis. Defaults to -1. derivative (int, optional): The order of the derivative. Defaults to 1. h (float, optional): The space of the uniform grid. Defaults to 1.. Returns: np.array: The derivative. """ df = np.zeros_like(f) weight_ = self.weight[derivative] alpha_ = self.alpha[weight_!=0] weight_ = weight_[weight_!=0] for i, alpha_i in enumerate(alpha_): df += np.roll(f, shift=-int(alpha_i), axis=axis) * weight_[i] return df / h**derivative def set_alpha(self): alpha_ = np.arange(self.nGridPoints, dtype=float) alpha_ = self.__infiniteSeries(alpha_) self.alpha = np.cumsum(alpha_) def __infiniteSeries(self, n): return n * (-1)**(n-1) class upwindFDM(object): """Upwind Finite Difference Method Args: order (int, optional): The order of the accuracy. Defaults to 1. highestDerivative (int, optional): The order of the highest derivative. Defaults to 1. """ def __init__(self, order:int=1, highestDerivative:int=1): assert order > 0, "order must be greater than 0." assert highestDerivative > 0, "highestDerivative must be greater than 0." self.order = order self.highestDerivative = highestDerivative self.nGridPoints = self.order+self.highestDerivative self.start = - (self.nGridPoints) // 2 self.alpha = np.arange(start=self.start, stop=self.start+self.nGridPoints) self.weight = FDMWeights(M=self.highestDerivative, x0=0., alpha=self.alpha)[:,self.order] self.weight2 = FDMWeights(M=self.highestDerivative, x0=0., alpha=-self.alpha)[:,self.order] def __call__(self, f, axis=-1, derivative=1, h=1., c=None): """Calculate the derivative. Args: f (np.array): An array containing samples. axis (int, optional): The derivative is calculated only along the given axis. 
Defaults to -1. derivative (int, optional): The order of the derivative. Defaults to 1. h (float, optional): The space of the uniform grid. Defaults to 1.. c (float or np.array, optional): The advection speed. Defaults to None. Returns: np.array: The derivative. """ df = np.zeros_like(f) df2 = np.zeros_like(f) for i, alpha_i in enumerate(self.alpha): df += np.roll(f, shift=-int(alpha_i), axis=axis) * self.weight[derivative,i] df2 += np.roll(f, shift=int(alpha_i), axis=axis) * self.weight2[derivative,i] if c == None: c = f df = np.where(c>=0, df, df2) return df / h**derivative
0
0
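A short accuracy check of centralFDM against an analytic derivative; since np.roll wraps around, the stencil is effectively periodic, so a full-period sine grid is the natural test:

import numpy as np

x = np.linspace(0, 2*np.pi, 128, endpoint=False)
h = x[1] - x[0]

d_dx = centralFDM(order=4, highestDerivative=1)
approx = d_dx(np.sin(x), derivative=1, h=h)

print(np.max(np.abs(approx - np.cos(x))))  # small (~2e-7 on this grid for a 4th-order stencil)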
ff72c44c7b3d8e713ec5a01de73d9db8358095b9
7,381
py
Python
revcar-cli.py
madkaye/revcar-cli
c49a0ae47b1545c81d53d4fb5ddccbc73203caa2
[ "MIT" ]
null
null
null
revcar-cli.py
madkaye/revcar-cli
c49a0ae47b1545c81d53d4fb5ddccbc73203caa2
[ "MIT" ]
null
null
null
revcar-cli.py
madkaye/revcar-cli
c49a0ae47b1545c81d53d4fb5ddccbc73203caa2
[ "MIT" ]
null
null
null
#!/usr/bin/env python3

import os
import time
import datetime

from bluepy import btle
from bluepy.btle import Scanner, DefaultDelegate, Peripheral, Characteristic, ScanEntry, Service, UUID

import curses
import curses.textpad

from carcontrol import CarControl

# Scan timeout in seconds
SCAN_TIMEOUT = 10

## screen parts
LINE_HEADING = 0
LINE_OPTIONS = 1
LINE_STATUS = 5
LINE_ERROR = 6
COL_START = 0
HEIGHT_TOP = 8
HEIGHT_BOT = 3

LOOP_DURATION = 0.05
DISPLAY_COUNT = 100

LINE_RECT = 30
RECT_HEIGHT = 12
RECT_WIDTH = 40

MSG_WELCOME = "Welcome to Carmageddon - in real life!\n"
MSG_OPTIONS = " [S] - start scanning...\t\t\t\t[Q] - Exit\n"
MSG_OPTIONS = MSG_OPTIONS + " [1...9] - Direct connect to device by number\t\t[D] - Disconnect \n"
MSG_DRIVE_HELP = "Use [Arrows] to drive, [SPACE] to Fire"


class MainScreen:
    status = 0
    lastmsg = None
    lasterror = None
    displaycounter = 0
    car = CarControl()

    def __init__(self):
        curses.wrapper(self.mainloop)

    def createmidwin(self):
        win = curses.newwin(curses.LINES - (HEIGHT_TOP + HEIGHT_BOT), curses.COLS, HEIGHT_TOP, COL_START)
        win.scrollok(True)
        win.idlok(True)
        win.addstr(LINE_HEADING, COL_START, "Information:", curses.A_BOLD)
        win.move(0, 0)
        win.refresh()
        return win

    def createbotwin(self):
        win = curses.newwin(HEIGHT_BOT, curses.COLS, curses.LINES - HEIGHT_BOT, COL_START)
        win.addstr(LINE_HEADING, COL_START, "Bot window", curses.A_BOLD)
        win.move(0, 0)
        win.refresh()
        return win

    def drawheadings(self, window):
        window.addstr(LINE_HEADING, COL_START, MSG_WELCOME, curses.A_BOLD)
        window.addstr(LINE_OPTIONS, COL_START, MSG_OPTIONS)
        window.hline('_', curses.COLS)
        window.refresh()

    def resizescreen(self, midwin, botwin):
        midwin.resize(curses.LINES - (HEIGHT_TOP + HEIGHT_BOT), curses.COLS)
        botwin.mvwin(curses.LINES - HEIGHT_TOP - 1, COL_START)

    def updatestatus(self, window, status=0, msg="", error=""):
        self.status = status
        self.lastmsg = msg
        self.lasterror = error
        if window is None:
            return
        statusmsg = "Status: {} - {}".format(self.status, self.lastmsg)
        errmsg = "Error: {}".format(self.lasterror) if len(self.lasterror) > 0 else ""
        window.move(LINE_STATUS, COL_START)
        window.addstr(LINE_STATUS, COL_START, statusmsg)
        window.clrtoeol()
        window.move(LINE_ERROR, COL_START)
        window.addstr(LINE_ERROR, COL_START, errmsg)
        window.clrtoeol()
        window.refresh()

    def countdownstatus(self):
        self.displaycounter = DISPLAY_COUNT

    def checkstatus(self):
        if self.displaycounter > 1 and self.status > 0:
            self.displaycounter = self.displaycounter - 1
            return False
        elif self.displaycounter == 1 and self.status > 0:
            self.status = 0
            self.displaycounter = 0
            return True
        else:
            return False

    def detailline(self, window, msg=""):
        window.clear()
        window.move(0, COL_START)
        window.addstr("{}".format(msg))
        window.refresh()
        window.move(0, COL_START)

    def debugline(self, window, msg=""):
        window.move(0, COL_START)
        window.addstr("dbg: {}".format(msg))
        window.clrtoeol()
        window.refresh()

    def mainloop(self, stdscr):
        self.drawheadings(stdscr)
        self.updatestatus(stdscr)
        midwin = self.createmidwin()
        botwin = self.createbotwin()
        self.debugline(botwin)
        stdscr.nodelay(True)

        while True:
            time.sleep(LOOP_DURATION)
            if self.checkstatus():
                self.updatestatus(stdscr)

            inchar = stdscr.getch()
            curses.flushinp()

            # SCAN
            if inchar == ord('s') or inchar == ord('S'):
                self.updatestatus(stdscr, 1, "Scanning...")
                self.detailline(midwin)
                if self.car.scan(SCAN_TIMEOUT):
                    self.updatestatus(stdscr, 1, "Scan - Done, found {} devices".format(len(self.car.devices)))
                else:
                    #self.updatestatus(stdscr, 1, "Scan - Error", "Could not initiate scanning")
                    self.updatestatus(stdscr, 1, "Scan - Error with scan, found {} devices".format(len(self.car.devices)))
                #self.countdownstatus()
                self.detailline(midwin, self.car.devicetext)
                self.debugline(botwin, "{}".format(self.car))

            # Connect
            elif inchar >= ord('1') and inchar <= ord('9'):
                devnum = inchar - ord('1') + 1
                self.debugline(botwin, "Device #{}".format(devnum))
                self.updatestatus(stdscr, 2, "Connecting to car #{}...".format(devnum))
                if self.car.connect((devnum - 1)):
                    self.updatestatus(stdscr, 2, "Connected to car #{} [{}]...".format(devnum, self.car.carName))
                    self.debugline(botwin, "Sending handshake...")
                    self.car.sendhandshake()
                    self.debugline(botwin, "Sending handshake, Done")
                    self.detailline(midwin, MSG_DRIVE_HELP)
                else:
                    self.updatestatus(stdscr, 2, "No connection to car #{}...".format(devnum))
                    self.debugline(botwin, "{}".format(self.car))

            # Disconnect
            elif inchar == ord('d') or inchar == ord('D'):
                self.updatestatus(stdscr, 3, "Disconnecting...")
                if self.car.disconnectcar():
                    self.updatestatus(stdscr, 2, "Disconnect, Done")
                else:
                    self.updatestatus(stdscr, 2, "Unable to disconnect car")
                self.detailline(midwin)
                self.debugline(botwin, "{}".format(self.car))

            # Quit
            elif inchar == ord('q') or inchar == ord('Q'):
                if self.car.isConnected:
                    self.car.disconnectcar()
                break

            # Movement Actions
            elif inchar == ord(' '):
                if self.car.isConnected:
                    self.car.carfiregun()
            elif inchar == curses.KEY_UP:
                if self.car.isConnected:
                    self.car.carforward()
            elif inchar == curses.KEY_DOWN:
                if self.car.isConnected:
                    self.car.carreverse()
            elif inchar == curses.KEY_LEFT:
                if self.car.isConnected:
                    self.car.carleft()
            elif inchar == curses.KEY_RIGHT:
                if self.car.isConnected:
                    self.car.carright()

            elif inchar == curses.KEY_RESIZE:
                curses.update_lines_cols()
                self.resizescreen(midwin, botwin)
                self.debugline(botwin, "resizing")
                self.drawheadings(stdscr)
                self.updatestatus(stdscr)
            elif inchar == curses.ERR or inchar == -1:
                continue
            else:
                continue


if __name__ == '__main__':
    try:
        screen = MainScreen()
    except KeyboardInterrupt:
        os.sys.exit(0)
    # finally:
34.013825
122
0.562796
0
0
cbfa2caf1265110b8014de4c1cbc3f72c30c2833
2,736
py
Python
landwatch/model/unet.py
Lleyton-Ariton/landwatch
21e86e899d33d0ee349cf9bf87c6c13ebdab82fa
[ "MIT" ]
1
2021-06-07T06:04:49.000Z
2021-06-07T06:04:49.000Z
landwatch/model/unet.py
Lleyton-Ariton/landwatch
21e86e899d33d0ee349cf9bf87c6c13ebdab82fa
[ "MIT" ]
null
null
null
landwatch/model/unet.py
Lleyton-Ariton/landwatch
21e86e899d33d0ee349cf9bf87c6c13ebdab82fa
[ "MIT" ]
null
null
null
import math

import torch
import torch.nn as nn


class ConvBlock(nn.Module):

    def __init__(self, in_channels: int, out_channels: int):
        super().__init__()

        self.in_channels = in_channels
        self.out_channels = out_channels

        self.conv_block = nn.Sequential(
            nn.Conv2d(self.in_channels, self.out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(self.out_channels),
            nn.ReLU(inplace=True),

            nn.Conv2d(self.out_channels, self.out_channels, kernel_size=3, padding=1),
            nn.BatchNorm2d(self.out_channels),
            nn.ReLU(inplace=True)
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.conv_block(x)


class DownScalingBlock(nn.Module):

    def __init__(self, in_channels: int, out_channels: int):
        super().__init__()

        self.downscaling_block = nn.Sequential(
            nn.MaxPool2d(2),
            ConvBlock(in_channels=in_channels, out_channels=out_channels)
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.downscaling_block(x)


class UNet(nn.Module):

    def __init__(self, in_channels: int, out_channels: int, bilinear: bool=True):
        super().__init__()

        self.in_channels = in_channels
        self.out_channels = out_channels
        self.bilinear = bilinear

        left_layers = [pow(2, i) for i in range(6, 11)]

        self.left = nn.ModuleList([DownScalingBlock(self.in_channels, 64)])
        self.right = nn.ModuleList([])

        self.left.extend([
            *[DownScalingBlock(left_layers[i], left_layers[i + 1]) for i in range(len(left_layers) - 1)]
        ])

        self.right.extend([
            ConvBlock(512 + 256, 256),
            ConvBlock(256 + 128, 128),
            ConvBlock(128 + 64, 64)
        ])

        self.maxpool = nn.MaxPool2d(2)
        self.upsample = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True)

        self.output = nn.Sequential(
            nn.Conv2d(64, self.out_channels, kernel_size=1),
            nn.Upsample(scale_factor=2),
            nn.Sigmoid()
        )

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        conv1 = self.left[0](x)
        conv2 = self.left[1](conv1)
        conv3 = self.left[2](conv2)

        x = self.left[3](conv3)

        x = self.upsample(x)
        x = torch.cat([x, conv3], dim=1)
        x = self.right[0](x)

        x = self.upsample(x)
        x = torch.cat([x, conv2], dim=1)
        x = self.right[1](x)

        x = self.upsample(x)
        x = torch.cat([x, conv1], dim=1)
        x = self.right[2](x)

        x = self.output(x)

        return x
26.057143
88
0.574196
0
0
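A quick shape check for the UNet above; a hedged sketch, assuming the file is importable as `unet` and that the input spatial size is divisible by 16 (the four MaxPool2d(2) stages), which the skip concatenations require:

# Sketch: forward-pass shape check for the UNet record above.
import torch
from unet import UNet  # assumes the module above is on the path

model = UNet(in_channels=3, out_channels=1)
x = torch.randn(1, 3, 64, 64)  # 64 is divisible by 16
with torch.no_grad():
    y = model(x)
print(y.shape)  # torch.Size([1, 1, 64, 64]); values in (0, 1) from the final Sigmoid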
2f9ce9ff91cdee6af0518b1de00124ffbd5bb250
5,714
py
Python
tests/functional/test_absensi.py
rdmaulana/absensi-api
d3dc8bef750ee0efc06e96689894dcdef2495ef6
[ "MIT" ]
null
null
null
tests/functional/test_absensi.py
rdmaulana/absensi-api
d3dc8bef750ee0efc06e96689894dcdef2495ef6
[ "MIT" ]
null
null
null
tests/functional/test_absensi.py
rdmaulana/absensi-api
d3dc8bef750ee0efc06e96689894dcdef2495ef6
[ "MIT" ]
null
null
null
import json

from app.models import BaseModel


def test_checkin_absen(test_client, user_token_pegawai):
    response = test_client.get(
        '/api/presensi/in',
        headers = dict(
            Authorization = 'Bearer '+ user_token_pegawai
        )
    )
    data = json.loads(response.data.decode())
    print(response)
    assert response.status_code == 201
    assert data['jamMasuk'] == BaseModel.generate_datetime_to_epoc()
    assert type(data['jamMasuk']) == int


def test_checkin_absen_already(test_client, user_token_pegawai):
    response = test_client.get(
        '/api/presensi/in',
        headers = dict(
            Authorization = 'Bearer '+ user_token_pegawai
        )
    )
    data = json.loads(response.data.decode())
    assert response.status_code == 400
    assert data['status'] == 'gagal'
    assert data['message'] == 'Anda sudah melakukan absen hari ini'


def test_checkout_absen(test_client, user_token_pegawai):
    response = test_client.get(
        '/api/presensi/out',
        headers = dict(
            Authorization = 'Bearer '+ user_token_pegawai
        )
    )
    data = json.loads(response.data.decode())
    assert response.status_code == 201
    assert data['jamKeluar'] == BaseModel.generate_datetime_to_epoc()
    assert type(data['jamKeluar']) == int


def test_history_absensi_pegawai(test_client, user_token_pegawai):
    tgl_awal = BaseModel.generate_epoc_date()
    tgl_akhir = BaseModel.generate_epoc_date()
    response = test_client.get(
        f'/api/presensi/daftar/pegawai?tglAwal={tgl_awal}&tglAkhir={tgl_akhir}',
        headers = dict(
            Authorization = 'Bearer '+ user_token_pegawai
        )
    )
    data = json.loads(response.data.decode())
    assert response.status_code == 200
    assert type(data) == list
    assert len(data) != 0


def test_history_absensi_pegawai_notvalid(test_client, user_token_pegawai):
    tgl_awal = BaseModel.generate_epoc_date()
    response = test_client.get(
        f'/api/presensi/daftar/pegawai?tglAwal={tgl_awal}&tglAkhir=',
        headers = dict(
            Authorization = 'Bearer '+ user_token_pegawai
        )
    )
    data = json.loads(response.data.decode())
    assert response.status_code == 400
    assert data['status'] == 'gagal'
    assert data['message'] == 'Atribut form tidak lengkap'


def test_get_combo_status_absen(test_client, user_token_pegawai):
    response = test_client.get(
        '/api/presensi/combo/status-absen',
        headers = dict(
            Authorization = 'Bearer ' + user_token_pegawai
        )
    )
    data = json.loads(response.data.decode())
    assert response.status_code == 200
    assert type(data) == list
    assert len(data) != 0


def test_get_history_absensi_pegawai_all(test_client, user_token):
    tgl_awal = BaseModel.generate_epoc_date()
    tgl_akhir = BaseModel.generate_epoc_date()
    response = test_client.get(
        '/api/presensi/daftar/admin?tglAwal={0}&tglAkhir={1}'.format(tgl_awal, tgl_akhir),
        headers = dict(
            Authorization = 'Bearer ' + user_token
        )
    )
    print(response)
    data = json.loads(response.data.decode())
    assert response.status_code == 200
    assert type(data) == list
    assert len(data) != 0


def test_history_absensi_pegawai_all_notvalid(test_client, user_token):
    tgl_awal = BaseModel.generate_epoc_date()
    response = test_client.get(
        f'/api/presensi/daftar/admin?tglAwal={tgl_awal}&tglAkhir=',
        headers = dict(
            Authorization = 'Bearer '+ user_token
        )
    )
    data = json.loads(response.data.decode())
    assert response.status_code == 400
    assert data['status'] == 'gagal'
    assert data['message'] == 'Atribut paramater tidak lengkap'


def test_history_absensi_pegawai_all_unauthorized(test_client, user_token_pegawai):
    tgl_awal = BaseModel.generate_epoc_date()
    tgl_akhir = BaseModel.generate_epoc_date()
    response = test_client.get(
        '/api/presensi/daftar/admin?tglAawal={0}&tglAkhir={1}'.format(tgl_awal, tgl_akhir),
        headers = dict(
            Authorization = 'Bearer '+ user_token_pegawai
        )
    )
    data = json.loads(response.data.decode())
    assert response.status_code == 401
    assert response.content_type == 'application/json'
    assert data['status'] == 'gagal'
    assert data['message'] == 'Akses ditolak'


def test_create_absensi(test_client, user_token_pegawai):
    response = test_client.post(
        '/api/presensi/absensi',
        headers = dict(
            Authorization = 'Bearer '+ user_token_pegawai
        ),
        data = json.dumps(
            dict(
                tglAbsensi = BaseModel.generate_epoc_date(),
                kdStatus = 2
            )
        ),
        content_type = 'application/json'
    )
    data = json.loads(response.data.decode())
    print(response)
    assert response.status_code == 201
    assert response.content_type == 'application/json'
    assert data['status'] == 'sukses'
    assert data['message'] == 'Berhasil menyimpan absensi'


def test_create_absensi_failed(test_client, user_token_pegawai):
    response = test_client.post(
        '/api/presensi/absensi',
        headers = dict(
            Authorization = 'Bearer '+ user_token_pegawai
        ),
        data = json.dumps(
            dict(
                tglAbsensi = BaseModel.generate_epoc_date(),
            )
        ),
        content_type = 'application/json'
    )
    data = json.loads(response.data.decode())
    assert response.status_code == 400
    assert response.content_type == 'application/json'
    assert data['status'] == 'gagal'
    assert data['message'] == 'Atribut form tidak lengkap'
32.282486
91
0.650508
0
0
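The tests above depend on `test_client` and `user_token_pegawai` fixtures that are not shown in the record. Below is a purely hypothetical conftest.py sketch of the shape such fixtures usually take; `create_app`, the login endpoint, and the credentials are all assumptions, not part of the visible source.

# Hypothetical conftest.py sketch for the test module above.
import json
import pytest
from app import create_app  # assumed application factory

@pytest.fixture(scope='module')
def test_client():
    app = create_app('testing')           # assumed config name
    with app.test_client() as client:
        yield client

@pytest.fixture(scope='module')
def user_token_pegawai(test_client):
    resp = test_client.post('/api/auth/login',  # assumed endpoint
                            data=json.dumps(dict(username='pegawai', password='secret')),
                            content_type='application/json')
    return json.loads(resp.data.decode())['token']  # assumed response shape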
0e2664f2020e42cdf1e34c5553fdb17e94b6240a
5,580
py
Python
spycy/operator.py
kanales/spycy
f8702bcbfed8eb60bc6e3fca76cb30d781d69cb0
[ "MIT" ]
null
null
null
spycy/operator.py
kanales/spycy
f8702bcbfed8eb60bc6e3fca76cb30d781d69cb0
[ "MIT" ]
null
null
null
spycy/operator.py
kanales/spycy
f8702bcbfed8eb60bc6e3fca76cb30d781d69cb0
[ "MIT" ]
null
null
null
""" This module contains all the functions from the ``operator`` module (bar some functions that dind't feel like they belonged here) transformed into a spice so it can be used more confortable. :Example: Consider adding ``2`` to a list of numbers:: map(add(2), [1,2,3,4,5]) """ import operator from spycy import spice __all__ = [ 'add', 'and_', 'contains', 'concat', 'countOf', 'eq', 'floordiv' , 'ge', 'getitem', 'gt', 'indexOf', 'is_', 'is_not', 'le', 'lshift' , 'lt', 'matmul', 'mod', 'mul', 'ne', 'or_', 'pos', 'pow', 'rshift' , 'sub', 'truediv', 'xor', 'neg', 'not_', 'index', 'itemgetter' , 'methodcaller', 'attrgetter', 'truth'] add = spice(lambda x,y: operator.__add__(x,y), name='add', doc=operator.add.__doc__) __add__ = spice(lambda x,y: operator.__add__(x,y), name='__add__', doc=operator.add.__doc__) and_ = spice(lambda x,y: operator.and_(x,y), name='and_', doc=operator.and_.__doc__) __and__ = spice(lambda x,y: operator.__and__(x,y), name='__and__', doc=operator.and_.__doc__) __contains__ = spice(lambda x,y: operator.__contains__(x,y), name='__contains__', doc=operator.contains.__doc__) contains = spice(lambda x,y: operator.contains(x,y), name='contains', doc=operator.contains.__doc__) concat = spice(lambda x,y: operator.concat(x,y), name='concat', doc=operator.concat.__doc__) countOf = spice(lambda x,y: operator.countOf(x,y), name='countOf', doc=operator.countOf.__doc__) eq = spice(lambda x,y: operator.eq(x,y), name='eq', doc=operator.eq.__doc__) __eq__ = spice(lambda x,y: operator.__eq__(x,y), name='__eq__', doc=operator.eq.__doc__) floordiv = spice(lambda x,y: operator.floordiv(x,y), name='floordiv', doc=operator.floordiv.__doc__) __floordiv__ = spice(lambda x,y: operator.__floordiv__(x,y), name='__floordiv__', doc=operator.floordiv.__doc__) # reversed ge = spice(lambda x,y: operator.ge(y,x), name='ge') __ge__ = spice(lambda x,y: operator.__ge__(y,x), name='__ge__') getitem = spice(lambda x,y: operator.getitem(x,y), name='getitem', doc=operator.getitem.__doc__) __getitem__ = spice(lambda x,y: operator.__getitem__(x,y), name='__getitem__', doc=operator.getitem.__doc__) # reversed gt = spice(lambda x,y: operator.gt(y,x), name='gt') __gt__ = spice(lambda x,y: operator.__gt__(y,x)) indexOf = spice(lambda x,y: operator.indexOf(x,y), name='indexOf', doc=operator.indexOf.__doc__) is_ = spice(lambda x,y: operator.is_(x,y), name='is_', doc=operator.is_.__doc__) is_not = spice(lambda x,y: operator.is_not(x,y), name='is_not', doc=operator.is_not.__doc__) # reversed le = spice(lambda x,y: operator.le(y,x), name='le') __le__ = spice(lambda x,y: operator.__le__(y,x), name='__le__') # reversed lshift = spice(lambda x,y: operator.lshift(y,x), name='lshift') __lshift__ = spice(lambda x,y: operator.__lshift__(y,x), name='__lshift__') # reversed lt = spice(lambda x,y: operator.lt(y,x), name='lt') __lt__ = spice(lambda x,y: operator.__lt__(y,x), name='__lt__') # reversed matmul = spice(lambda x,y: operator.matmul(y,x), name='matmul') __matmul__ = spice(lambda x,y: operator.__matmul__(y,x), name='__matmul__') # reversed mod = spice(lambda x,y: operator.mod(y,x), name='mod') __mod__ = spice(lambda x,y: operator.__mod__(y,x), name='__mod__') mul = spice(lambda x,y: operator.mul(x,y), name='mul', doc=operator.mul.__doc__) __mul__ = spice(lambda x,y: operator.__mul__(x,y), name='__mul__', doc=operator.mul.__doc__) ne = spice(lambda x,y: operator.ne(x,y), name='ne', doc=operator.ne.__doc__) __ne__ = spice(lambda x,y: operator.__ne__(x,y), name='__ne__', doc=operator.ne.__doc__) or_ = spice(lambda x,y: 
operator.or_(x,y), name='or_', doc=operator.or_.__doc__) __or__ = spice(lambda x,y: operator.__or__(x,y), name='__or__', doc=operator.or_.__doc__) pos = spice(lambda x,y: operator.pos(x,y), name='pos', doc=operator.pos.__doc__) #reversed pow = spice(lambda x,y: operator.pow(y,x), name='pow') __pow__ = spice(lambda x,y: operator.__pow__(y,x), name='__pow__') # reversed rshift = spice(lambda x,y: operator.rshift(y,x), name='rshift') __rshift__ = spice(lambda x,y: operator.__rshift__(y,x), name='__rshift__') # reversed sub = spice(lambda x,y: operator.sub(y,x), name='sub') __sub__ = spice(lambda x,y: operator.__sub__(y,x), name='__sub__') # reversed truediv = spice(lambda x,y: operator.truediv(y,x), name='truediv') __truediv__ = spice(lambda x,y: operator.__truediv__(y,x), name='__truediv__') xor = spice(lambda x,y: operator.xor(x,y), name='xor', doc=operator.xor.__doc__) __xor__ = spice(lambda x,y: operator.__xor__(x,y), name='__xor__', doc=operator.xor.__doc__) ################################################# neg = spice(lambda x: operator.neg(x), name='neg', doc=operator.neg.__doc__) __neg__ = spice(lambda x: operator.__neg__(x), name='__neg__', doc=operator.neg.__doc__) not_ = spice(lambda x: operator.not_(x), name='not_', doc=operator.not_.__doc__) __not__ = spice(lambda x: operator.__not__(x), name='__not__', doc=operator.not_.__doc__) index = spice(lambda x: operator.index(x), name='index', doc=operator.index.__doc__) __index__ = spice(lambda x: operator.__index__(x), name='__index__', doc=operator.index.__doc__) itemgetter = spice(lambda x: operator.itemgetter(x), name='itemgetter', doc=operator.itemgetter.__doc__) methodcaller = spice(lambda x: operator.methodcaller(x), name='methodcaller', doc=operator.methodcaller.__doc__) attrgetter = spice(lambda x: operator.attrgetter(x), name='attrgetter', doc=operator.attrgetter.__doc__) truth = spice(lambda x: operator.truth(x), name='truth', doc=operator.truth.__doc__)
45.365854
112
0.704659
""" This module contains all the functions from the ``operator`` module (bar some functions that dind't feel like they belonged here) transformed into a spice so it can be used more confortable. :Example: Consider adding ``2`` to a list of numbers:: map(add(2), [1,2,3,4,5]) """ import operator from spycy import spice __all__ = [ 'add', 'and_', 'contains', 'concat', 'countOf', 'eq', 'floordiv' , 'ge', 'getitem', 'gt', 'indexOf', 'is_', 'is_not', 'le', 'lshift' , 'lt', 'matmul', 'mod', 'mul', 'ne', 'or_', 'pos', 'pow', 'rshift' , 'sub', 'truediv', 'xor', 'neg', 'not_', 'index', 'itemgetter' , 'methodcaller', 'attrgetter', 'truth'] add = spice(lambda x,y: operator.__add__(x,y), name='add', doc=operator.add.__doc__) __add__ = spice(lambda x,y: operator.__add__(x,y), name='__add__', doc=operator.add.__doc__) and_ = spice(lambda x,y: operator.and_(x,y), name='and_', doc=operator.and_.__doc__) __and__ = spice(lambda x,y: operator.__and__(x,y), name='__and__', doc=operator.and_.__doc__) __contains__ = spice(lambda x,y: operator.__contains__(x,y), name='__contains__', doc=operator.contains.__doc__) contains = spice(lambda x,y: operator.contains(x,y), name='contains', doc=operator.contains.__doc__) concat = spice(lambda x,y: operator.concat(x,y), name='concat', doc=operator.concat.__doc__) countOf = spice(lambda x,y: operator.countOf(x,y), name='countOf', doc=operator.countOf.__doc__) eq = spice(lambda x,y: operator.eq(x,y), name='eq', doc=operator.eq.__doc__) __eq__ = spice(lambda x,y: operator.__eq__(x,y), name='__eq__', doc=operator.eq.__doc__) floordiv = spice(lambda x,y: operator.floordiv(x,y), name='floordiv', doc=operator.floordiv.__doc__) __floordiv__ = spice(lambda x,y: operator.__floordiv__(x,y), name='__floordiv__', doc=operator.floordiv.__doc__) # reversed ge = spice(lambda x,y: operator.ge(y,x), name='ge') __ge__ = spice(lambda x,y: operator.__ge__(y,x), name='__ge__') getitem = spice(lambda x,y: operator.getitem(x,y), name='getitem', doc=operator.getitem.__doc__) __getitem__ = spice(lambda x,y: operator.__getitem__(x,y), name='__getitem__', doc=operator.getitem.__doc__) # reversed gt = spice(lambda x,y: operator.gt(y,x), name='gt') __gt__ = spice(lambda x,y: operator.__gt__(y,x)) indexOf = spice(lambda x,y: operator.indexOf(x,y), name='indexOf', doc=operator.indexOf.__doc__) is_ = spice(lambda x,y: operator.is_(x,y), name='is_', doc=operator.is_.__doc__) is_not = spice(lambda x,y: operator.is_not(x,y), name='is_not', doc=operator.is_not.__doc__) # reversed le = spice(lambda x,y: operator.le(y,x), name='le') __le__ = spice(lambda x,y: operator.__le__(y,x), name='__le__') # reversed lshift = spice(lambda x,y: operator.lshift(y,x), name='lshift') __lshift__ = spice(lambda x,y: operator.__lshift__(y,x), name='__lshift__') # reversed lt = spice(lambda x,y: operator.lt(y,x), name='lt') __lt__ = spice(lambda x,y: operator.__lt__(y,x), name='__lt__') # reversed matmul = spice(lambda x,y: operator.matmul(y,x), name='matmul') __matmul__ = spice(lambda x,y: operator.__matmul__(y,x), name='__matmul__') # reversed mod = spice(lambda x,y: operator.mod(y,x), name='mod') __mod__ = spice(lambda x,y: operator.__mod__(y,x), name='__mod__') mul = spice(lambda x,y: operator.mul(x,y), name='mul', doc=operator.mul.__doc__) __mul__ = spice(lambda x,y: operator.__mul__(x,y), name='__mul__', doc=operator.mul.__doc__) ne = spice(lambda x,y: operator.ne(x,y), name='ne', doc=operator.ne.__doc__) __ne__ = spice(lambda x,y: operator.__ne__(x,y), name='__ne__', doc=operator.ne.__doc__) or_ = spice(lambda x,y: 
operator.or_(x,y), name='or_', doc=operator.or_.__doc__) __or__ = spice(lambda x,y: operator.__or__(x,y), name='__or__', doc=operator.or_.__doc__) pos = spice(lambda x,y: operator.pos(x,y), name='pos', doc=operator.pos.__doc__) #reversed pow = spice(lambda x,y: operator.pow(y,x), name='pow') __pow__ = spice(lambda x,y: operator.__pow__(y,x), name='__pow__') # reversed rshift = spice(lambda x,y: operator.rshift(y,x), name='rshift') __rshift__ = spice(lambda x,y: operator.__rshift__(y,x), name='__rshift__') # reversed sub = spice(lambda x,y: operator.sub(y,x), name='sub') __sub__ = spice(lambda x,y: operator.__sub__(y,x), name='__sub__') # reversed truediv = spice(lambda x,y: operator.truediv(y,x), name='truediv') __truediv__ = spice(lambda x,y: operator.__truediv__(y,x), name='__truediv__') xor = spice(lambda x,y: operator.xor(x,y), name='xor', doc=operator.xor.__doc__) __xor__ = spice(lambda x,y: operator.__xor__(x,y), name='__xor__', doc=operator.xor.__doc__) ################################################# neg = spice(lambda x: operator.neg(x), name='neg', doc=operator.neg.__doc__) __neg__ = spice(lambda x: operator.__neg__(x), name='__neg__', doc=operator.neg.__doc__) not_ = spice(lambda x: operator.not_(x), name='not_', doc=operator.not_.__doc__) __not__ = spice(lambda x: operator.__not__(x), name='__not__', doc=operator.not_.__doc__) index = spice(lambda x: operator.index(x), name='index', doc=operator.index.__doc__) __index__ = spice(lambda x: operator.__index__(x), name='__index__', doc=operator.index.__doc__) itemgetter = spice(lambda x: operator.itemgetter(x), name='itemgetter', doc=operator.itemgetter.__doc__) methodcaller = spice(lambda x: operator.methodcaller(x), name='methodcaller', doc=operator.methodcaller.__doc__) attrgetter = spice(lambda x: operator.attrgetter(x), name='attrgetter', doc=operator.attrgetter.__doc__) truth = spice(lambda x: operator.truth(x), name='truth', doc=operator.truth.__doc__)
0
0
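A short usage sketch for the module above. It assumes `spice` curries its wrapped function (as the docstring's `map(add(2), ...)` example implies) and that the module is importable as `spycy.operator`; note how the "reversed" sections make the bound value the right-hand operand, so `lt(5)` reads as "less than 5".

# Sketch of the curried operators, assuming spice(...) curries.
from spycy.operator import add, lt, sub

print(list(map(add(2), [1, 2, 3])))    # [3, 4, 5]
print(list(filter(lt(5), [3, 5, 8])))  # [3]  (3 < 5, thanks to the y,x swap)
print(sub(1)(10))                      # 9   (10 - 1, not 1 - 10)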
8f22602f2a4aaf4a77b2fbdbfd6ec7f59e6787bf
2,577
bzl
Python
recipes/brotli/config.bzl
curoky/rules_cc
943408c05e2204e1e603b70db05037217a53868d
[ "Apache-2.0" ]
3
2022-02-06T10:10:44.000Z
2022-02-07T11:53:25.000Z
recipes/brotli/config.bzl
curoky/rules_cc
943408c05e2204e1e603b70db05037217a53868d
[ "Apache-2.0" ]
null
null
null
recipes/brotli/config.bzl
curoky/rules_cc
943408c05e2204e1e603b70db05037217a53868d
[ "Apache-2.0" ]
null
null
null
# Copyright 2021 curoky(cccuroky@gmail.com).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

config = {
    "name": "com_github_google_brotli",
    "type": "git_repository",
    # "remote": "https://github.com/google/brotli",
    "remote": "https://github.com/pefoley2/brotli",
    "used_version": "heads/master",
    "versions": {
        "heads/master": {},
        "tags/v1.0.9": {},
    },
}

# Note:
# 1: after v1.0.9, brotli uses vla-parameter, which gcc-11 treats as an error by default
#    fix pr: https://github.com/google/brotli/pull/904
#
# external/com_github_google_brotli/c/dec/decode.c:2036:41: error: argument 2 of type 'const uint8_t *' {aka 'const unsigned char *'} declared as a pointer [-Werror=vla-parameter]
#  2036 |     size_t encoded_size, const uint8_t* encoded_buffer, size_t* decoded_size,
#       |                          ~~~~~~~~~~~~~~~^~~~~~~~~~~~~~
# In file included from external/com_github_google_brotli/c/dec/decode.c:7:
# bazel-out/k8-dbg/bin/external/com_github_google_brotli/_virtual_includes/brotli_inc/brotli/decode.h:204:19: note: previously declared as a variable length array 'const uint8_t[*decoded_size]' {aka 'const unsigned char[*decoded_size]'}
#   204 |     const uint8_t encoded_buffer[BROTLI_ARRAY_PARAM(encoded_size)],
#       |     ~~~~~~~~~~~~~~^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# external/com_github_google_brotli/c/dec/decode.c:2037:14: error: argument 4 of type 'uint8_t *' {aka 'unsigned char *'} declared as a pointer [-Werror=vla-parameter]
#  2037 |     uint8_t* decoded_buffer) {
#       |     ~~~~~~~~~^~~~~~~~~~~~~~
# In file included from external/com_github_google_brotli/c/dec/decode.c:7:
# bazel-out/k8-dbg/bin/external/com_github_google_brotli/_virtual_includes/brotli_inc/brotli/decode.h:206:13: note: previously declared as a variable length array 'uint8_t[encoded_size]' {aka 'unsigned char[encoded_size]'}
#   206 |     uint8_t decoded_buffer[BROTLI_ARRAY_PARAM(*decoded_size)]);
#       |     ~~~~~~~~^~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# cc1: all warnings being treated as errors
57.266667
236
0.668607
0
0
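The record above is only a declarative dict; how rules_cc consumes it is not shown. The sketch below is a hypothetical loader illustrating how `used_version` could map onto `git_repository` keyword arguments; `to_repo_kwargs` is invented for this example and is not part of the repository.

# Hypothetical consumer of the config dict above (plain Python).
def to_repo_kwargs(cfg):
    version = cfg["used_version"]
    kwargs = dict(name=cfg["name"], remote=cfg["remote"])
    if version.startswith("heads/"):
        kwargs["branch"] = version[len("heads/"):]   # heads/master -> branch="master"
    elif version.startswith("tags/"):
        kwargs["tag"] = version[len("tags/"):]       # tags/v1.0.9 -> tag="v1.0.9"
    return kwargs

print(to_repo_kwargs(config))
# {'name': 'com_github_google_brotli', 'remote': 'https://github.com/pefoley2/brotli', 'branch': 'master'}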
1e9f1106e32966139e9187c455ded6eb0bcb3ee5
565
py
Python
vvphotos/serializers.py
synw/django-vvphotos
3dd93fbe8a29d8db6fe440a40ee700d229da537b
[ "MIT" ]
1
2017-04-05T04:09:00.000Z
2017-04-05T04:09:00.000Z
vvphotos/serializers.py
synw/django-vvphotos
3dd93fbe8a29d8db6fe440a40ee700d229da537b
[ "MIT" ]
null
null
null
vvphotos/serializers.py
synw/django-vvphotos
3dd93fbe8a29d8db6fe440a40ee700d229da537b
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from django.contrib.contenttypes.models import ContentType from rest_framework import serializers from vvphotos.models import Album class AlbumSerializer(serializers.ModelSerializer): class Meta: model = Album fields = read_only_fields = ["slug", "title", "image", "description", "url", "photos", 'children'] depth = 1 class AlbumsSerializer(serializers.ModelSerializer): class Meta: model = Album fields = read_only_fields = ["slug", "title", "parent", "image", "description", "url"]
25.681818
106
0.677876
# -*- coding: utf-8 -*- from django.contrib.contenttypes.models import ContentType from rest_framework import serializers from vvphotos.models import Album class AlbumSerializer(serializers.ModelSerializer): class Meta: model = Album fields = read_only_fields = ["slug", "title", "image", "description", "url", "photos", 'children'] depth = 1 class AlbumsSerializer(serializers.ModelSerializer): class Meta: model = Album fields = read_only_fields = ["slug", "title", "parent", "image", "description", "url"]
0
0
2e44b8dfcbd49f05cef5d226ed68147f4d615605
6,650
py
Python
docusign_esign/models/currency_feature_set_price.py
hunk/docusign-python-client
a643c42c1236715e74eef6fc279a1b29da1b5455
[ "MIT" ]
null
null
null
docusign_esign/models/currency_feature_set_price.py
hunk/docusign-python-client
a643c42c1236715e74eef6fc279a1b29da1b5455
[ "MIT" ]
null
null
null
docusign_esign/models/currency_feature_set_price.py
hunk/docusign-python-client
a643c42c1236715e74eef6fc279a1b29da1b5455
[ "MIT" ]
null
null
null
# coding: utf-8 """ DocuSign REST API The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign. OpenAPI spec version: v2.1 Contact: devcenter@docusign.com Generated by: https://github.com/swagger-api/swagger-codegen.git """ from pprint import pformat from six import iteritems import re class CurrencyFeatureSetPrice(object): """ NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ def __init__(self, currency_code=None, currency_symbol=None, envelope_fee=None, fixed_fee=None, seat_fee=None): """ CurrencyFeatureSetPrice - a model defined in Swagger :param dict swaggerTypes: The key is attribute name and the value is attribute type. :param dict attributeMap: The key is attribute name and the value is json key in definition. """ self.swagger_types = { 'currency_code': 'str', 'currency_symbol': 'str', 'envelope_fee': 'str', 'fixed_fee': 'str', 'seat_fee': 'str' } self.attribute_map = { 'currency_code': 'currencyCode', 'currency_symbol': 'currencySymbol', 'envelope_fee': 'envelopeFee', 'fixed_fee': 'fixedFee', 'seat_fee': 'seatFee' } self._currency_code = currency_code self._currency_symbol = currency_symbol self._envelope_fee = envelope_fee self._fixed_fee = fixed_fee self._seat_fee = seat_fee @property def currency_code(self): """ Gets the currency_code of this CurrencyFeatureSetPrice. Specifies the alternate ISO currency code for the account. :return: The currency_code of this CurrencyFeatureSetPrice. :rtype: str """ return self._currency_code @currency_code.setter def currency_code(self, currency_code): """ Sets the currency_code of this CurrencyFeatureSetPrice. Specifies the alternate ISO currency code for the account. :param currency_code: The currency_code of this CurrencyFeatureSetPrice. :type: str """ self._currency_code = currency_code @property def currency_symbol(self): """ Gets the currency_symbol of this CurrencyFeatureSetPrice. Specifies the alternate currency symbol for the account. :return: The currency_symbol of this CurrencyFeatureSetPrice. :rtype: str """ return self._currency_symbol @currency_symbol.setter def currency_symbol(self, currency_symbol): """ Sets the currency_symbol of this CurrencyFeatureSetPrice. Specifies the alternate currency symbol for the account. :param currency_symbol: The currency_symbol of this CurrencyFeatureSetPrice. :type: str """ self._currency_symbol = currency_symbol @property def envelope_fee(self): """ Gets the envelope_fee of this CurrencyFeatureSetPrice. An incremental envelope cost for plans with envelope overages (when `isEnabled` is set to **true**.) :return: The envelope_fee of this CurrencyFeatureSetPrice. :rtype: str """ return self._envelope_fee @envelope_fee.setter def envelope_fee(self, envelope_fee): """ Sets the envelope_fee of this CurrencyFeatureSetPrice. An incremental envelope cost for plans with envelope overages (when `isEnabled` is set to **true**.) :param envelope_fee: The envelope_fee of this CurrencyFeatureSetPrice. :type: str """ self._envelope_fee = envelope_fee @property def fixed_fee(self): """ Gets the fixed_fee of this CurrencyFeatureSetPrice. Specifies a one-time fee associated with the plan (when `isEnabled` is set to **true**.) :return: The fixed_fee of this CurrencyFeatureSetPrice. :rtype: str """ return self._fixed_fee @fixed_fee.setter def fixed_fee(self, fixed_fee): """ Sets the fixed_fee of this CurrencyFeatureSetPrice. 
Specifies a one-time fee associated with the plan (when `isEnabled` is set to **true**.) :param fixed_fee: The fixed_fee of this CurrencyFeatureSetPrice. :type: str """ self._fixed_fee = fixed_fee @property def seat_fee(self): """ Gets the seat_fee of this CurrencyFeatureSetPrice. Specifies an incremental seat cost for seat-based plans (when `isEnabled` is set to **true**.) :return: The seat_fee of this CurrencyFeatureSetPrice. :rtype: str """ return self._seat_fee @seat_fee.setter def seat_fee(self, seat_fee): """ Sets the seat_fee of this CurrencyFeatureSetPrice. Specifies an incremental seat cost for seat-based plans (when `isEnabled` is set to **true**.) :param seat_fee: The seat_fee of this CurrencyFeatureSetPrice. :type: str """ self._seat_fee = seat_fee def to_dict(self): """ Returns the model properties as a dict """ result = {} for attr, _ in iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """ Returns the string representation of the model """ return pformat(self.to_dict()) def __repr__(self): """ For `print` and `pprint` """ return self.to_str() def __eq__(self, other): """ Returns true if both objects are equal """ return self.__dict__ == other.__dict__ def __ne__(self, other): """ Returns true if both objects are not equal """ return not self == other
30.365297
126
0.603308
0
0
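A small round-trip sketch for the generated model above. Note that `to_dict()` keys use the Python attribute names from `swagger_types`, while `attribute_map` holds the camelCase JSON names; `__eq__` simply compares `__dict__`.

# Sketch: constructing and serializing the generated model above.
from docusign_esign.models.currency_feature_set_price import CurrencyFeatureSetPrice

price = CurrencyFeatureSetPrice(currency_code='USD', currency_symbol='$',
                                envelope_fee='1.50', fixed_fee='0', seat_fee='10')

print(price.to_dict()['currency_code'])       # 'USD' (snake_case key)
print(price.attribute_map['currency_code'])   # 'currencyCode' (JSON key)

same = CurrencyFeatureSetPrice(currency_code='USD', currency_symbol='$',
                               envelope_fee='1.50', fixed_fee='0', seat_fee='10')
print(price == same)                          # True: __eq__ compares __dict__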
374c484b3ff01260b21cf925102e1875ef9938a6
477
py
Python
WarelogPyApi/app.py
propil5/WarelogManager
6baf338855175259877257352f9986a02ffd3e2e
[ "MIT" ]
null
null
null
WarelogPyApi/app.py
propil5/WarelogManager
6baf338855175259877257352f9986a02ffd3e2e
[ "MIT" ]
null
null
null
WarelogPyApi/app.py
propil5/WarelogManager
6baf338855175259877257352f9986a02ffd3e2e
[ "MIT" ]
null
null
null
from flask.helpers import url_for
from pyTrendsExtensions import GetTrendingOverTime
from flask import Flask, redirect
# from flask_restful import Api, Resource, reqparse, abort, fields, marshal_with
# from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
# api = Api(app)


@app.route("/")
def hello():
    return redirect("http://127.0.0.1:5000/test", code=302)


@app.route("/<keyword>")
def GetTrendingDataForKeyword(keyword):
    return GetTrendingOverTime(keyword)
29.8125
80
0.769392
0
0
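The redirect route above can be exercised without a running server via Flask's test client; a hedged sketch, assuming the module is saved as app.py (the `/<keyword>` route would still hit Google Trends for real through `GetTrendingOverTime`, so it is left out).

# Sketch: checking the "/" redirect with the test client.
from app import app  # assumes the module above is importable as app

client = app.test_client()
resp = client.get('/')
print(resp.status_code, resp.headers['Location'])
# 302 http://127.0.0.1:5000/test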
06d5b67bfa6a6b800139d199e86030897e4fbec4
1,190
py
Python
TCPServer/server.py
listenzcc/BCIMiddleware
80f74731b4df7f6da84c5df0c67e0ca4e6af7102
[ "MIT" ]
null
null
null
TCPServer/server.py
listenzcc/BCIMiddleware
80f74731b4df7f6da84c5df0c67e0ca4e6af7102
[ "MIT" ]
null
null
null
TCPServer/server.py
listenzcc/BCIMiddleware
80f74731b4df7f6da84c5df0c67e0ca4e6af7102
[ "MIT" ]
null
null
null
'''
TCP server interface for console.
The TCP server will be automatically built.

- @interface: The function for the user interface; it keeps the server running.
'''

from . import logger
from .defines import TCPServer

server = TCPServer()
server.start()


def interface():
    logger.info(f'Interface starts')

    help_msg = dict(
        h="Show help message",
        q="Quit",
        list="List the alive sessions",
        send="Send message through all alive sessions, send [message]"
    )

    while True:
        inp = input('>> ')

        if inp == 'q':
            break

        if inp == 'h':
            for key, value in help_msg.items():
                print(f'{key}: {value}')
            continue

        if inp == 'list':
            for i, session in enumerate(server.alive_sessions()):
                print(f'[{i}]', session.address)
            continue

        if inp.startswith('send '):
            message = inp.split(' ', 1)[1]
            for session in server.alive_sessions():
                session.send(message)
            continue

    print('ByeBye')
    logger.info(f'Interface stops')
    return 0
23.8
76
0.532773
0
0
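Since `defines.TCPServer` itself is not shown in the record, its bind address is unknown; the sketch below is only the client-side counterpart of the console's `send` command, with the host and port as placeholder assumptions. Connect first, then type `send hello` in the console and the text arrives here.

# Hypothetical client for the console interface above.
import socket

with socket.create_connection(('127.0.0.1', 9999)) as sock:  # assumed address
    data = sock.recv(1024)           # blocks until the console issues `send ...`
    print('received:', data.decode())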
036784373473c2b9223ab0d805911bb033726125
712
py
Python
ideas/views.py
Adstefnum/ScrapBook
8391e86daf678f64a30dd693e34cd69939b6fe0d
[ "MIT" ]
null
null
null
ideas/views.py
Adstefnum/ScrapBook
8391e86daf678f64a30dd693e34cd69939b6fe0d
[ "MIT" ]
null
null
null
ideas/views.py
Adstefnum/ScrapBook
8391e86daf678f64a30dd693e34cd69939b6fe0d
[ "MIT" ]
null
null
null
from django.shortcuts import render
from django.views.generic import ListView

from .models import Video, Audio, Image, Note


class IndexView(ListView):
    template_name = "ideas/index.html"

    def get_queryset(self):
        return Note.objects.all()


class AudioView(ListView):
    template_name = "ideas/audio.html"

    def get_queryset(self):
        return Audio.objects.all()


class VideoView(ListView):
    template_name = "ideas/video.html"

    def get_queryset(self):
        return Video.objects.all()


class ImageView(ListView):
    template_name = "ideas/image.html"

    def get_queryset(self):
        return Image.objects.all()


class NoteView(ListView):
    template_name = "ideas/note.html"

    def get_queryset(self):
        return Note.objects.all()
20.941176
42
0.76264
0
0
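The ListViews above still need URL wiring to be reachable; a hypothetical urls.py sketch follows, with route paths and names chosen for illustration only.

# Hypothetical urls.py for the views above.
from django.urls import path

from .views import IndexView, AudioView, VideoView, ImageView, NoteView

urlpatterns = [
    path('', IndexView.as_view(), name='index'),
    path('audio/', AudioView.as_view(), name='audio'),
    path('video/', VideoView.as_view(), name='video'),
    path('image/', ImageView.as_view(), name='image'),
    path('note/', NoteView.as_view(), name='note'),
]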
15aa8a1888009a607ef0c0137fb959c9365621e7
3,701
py
Python
src/animal.py
Doankt/Rabbit-Wolf-Evolution-Simulation
ae3d3cb4cad7fd5059921d0a3cf41d93ab56e88e
[ "MIT" ]
null
null
null
src/animal.py
Doankt/Rabbit-Wolf-Evolution-Simulation
ae3d3cb4cad7fd5059921d0a3cf41d93ab56e88e
[ "MIT" ]
null
null
null
src/animal.py
Doankt/Rabbit-Wolf-Evolution-Simulation
ae3d3cb4cad7fd5059921d0a3cf41d93ab56e88e
[ "MIT" ]
null
null
null
from worldtools import *

from enum import Enum
from math import sin, cos, pi
from random import uniform


class State(Enum):
    ROAM = 0
    REPRODUCE = 1


class Animal:
    """Class representing Animal in the world"""

    def __init__(self, world, pos: (float, float), speed: float):
        """
        Initializes the Animal

        Args:
            world (World): The world
            pos ( (float, float) ): Starting position
            speed (float): Animal speed
        """
        self.speed = speed
        self.pos = pos
        self.world = world

        # Movement variables
        self.target = None
        self.movement_angle = uniform(0, pi*2)

        # Food variables
        self.hunger = 100
        self.eat_count = 0
        self._food_checkpoint = 0

        # Set state
        self.state = State.ROAM

    def move(self) -> Exception:
        """
        Moves an animal based on state

        Raises:
            NotImplementedError: Should be overwritten in a derived class

        Returns:
            Exception: Will always raise NotImplementedError if called from Animal class
        """
        raise NotImplementedError()

    def draw(self, screen) -> Exception:
        """
        Draws an animal to the screen

        Args:
            screen (pygame.screen): pygame screen

        Raises:
            NotImplementedError: Should be overwritten in a derived class

        Returns:
            Exception: Will always raise NotImplementedError if called from Animal class
        """
        raise NotImplementedError()

    def sight_entities(self) -> (["Food"], ["Rabbit"], ["Wolf"]):
        """
        Returns all entities in vision of the Animal

        Args:
            self (Animal): self

        Returns:
            ([Food], [Rabbit], [Wolf]): Returns a 3-tuple with Food, Rabbit, Wolf in vision
        """
        # Get foods around self
        foodlist = []
        for food in self.world.food:
            if self != food and self._in_sight(food):
                foodlist.append(food)

        # Get rabbits around self
        rabbitlist = []
        for rabbit in self.world.rabbits:
            if self != rabbit and self._in_sight(rabbit):
                rabbitlist.append(rabbit)

        # Get wolves around self
        wolflist = []
        for wolf in self.world.wolves:
            if self != wolf and self._in_sight(wolf):
                wolflist.append(wolf)

        # Sort by distance to self
        foodlist.sort(key=lambda x: distance(self.pos, x.pos))
        rabbitlist.sort(key=lambda x: distance(self.pos, x.pos))
        wolflist.sort(key=lambda x: distance(self.pos, x.pos))

        return (foodlist, rabbitlist, wolflist)

    def eat(self, inc: float) -> None:
        """
        Increments the eat count and adds to hunger

        Args:
            inc (float): Amount to increase hunger
        """
        # Increment eat count
        self.eat_count += 1

        # Limit to 100
        if self.hunger + inc >= 100:
            self.hunger = 100
        else:
            self.hunger += inc

    def roam_move(self) -> None:
        """
        Moves Animal in the direction they are facing and slightly changes movement angle
        """
        # Proposed move
        new_x = self.pos[0] + (self.speed * cos(self.movement_angle))
        new_y = self.pos[1] + (self.speed * sin(self.movement_angle))

        # Check if valid move
        while not self.world.in_bounds((new_x, new_y)):
            # Reset move
            self.movement_angle += pi/2
            new_x = self.pos[0] + (self.speed * cos(self.movement_angle))
            new_y = self.pos[1] + (self.speed * sin(self.movement_angle))

        # Confirm move
        self.pos = (new_x, new_y)

        # Adjust movement angle
        self.movement_angle += uniform(-pi*2 / 36, pi*2 / 36)

    def _in_sight(self, entity) -> bool:
        """
        Returns if an entity (which has a pos) is in sight of the Animal

        Args:
            entity (Animal or Food): Entity to check

        Returns:
            bool: True if entity is in sight, False otherwise
        """
        return distance(self.pos, entity.pos) <= self.sight

    def __repr__(self) -> str:
        return "{}".format(self.pos)
        # return "speed={}, pos={}, hunger={}, state={}".format(
        #     self.speed,
        #     self.pos,
        #     self.hunger,
        #     self.state
        # )
22.705521
83
0.667657
0
0
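The class above is abstract in practice: `move()` and `draw()` raise NotImplementedError, and `_in_sight()` reads a `self.sight` that subclasses must provide. A hedged sketch of that contract follows; the `StubWorld`, the Rabbit parameters, and the import all assume `worldtools` is on the path, and only `in_bounds()` is actually exercised by `roam_move()`.

# Sketch of the subclass contract implied by the Animal base class above.
from animal import Animal, State  # assumes the module above is importable

class StubWorld:
    food, rabbits, wolves = [], [], []          # empty world for the demo
    def in_bounds(self, pos):
        return 0 <= pos[0] <= 100 and 0 <= pos[1] <= 100

class Rabbit(Animal):
    def __init__(self, world, pos):
        super().__init__(world, pos, speed=2.0)
        self.sight = 25          # required by _in_sight()
    def move(self):
        self.roam_move()         # ROAM state: wander with slight turns
    def draw(self, screen):
        pass                     # no rendering in this sketch

r = Rabbit(StubWorld(), (50.0, 50.0))
r.move()
print(r.pos, r.state)            # new position near (50, 50), State.ROAM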
29436600c26ca109f41fc8cbab442dc2297f2a28
2,261
py
Python
graphgallery/gallery/nodeclas/dgl/appnp.py
dongzizhu/GraphGallery
c65eab42daeb52de5019609fe7b368e30863b4ae
[ "MIT" ]
1
2020-07-29T08:00:32.000Z
2020-07-29T08:00:32.000Z
graphgallery/gallery/nodeclas/dgl/appnp.py
dongzizhu/GraphGallery
c65eab42daeb52de5019609fe7b368e30863b4ae
[ "MIT" ]
null
null
null
graphgallery/gallery/nodeclas/dgl/appnp.py
dongzizhu/GraphGallery
c65eab42daeb52de5019609fe7b368e30863b4ae
[ "MIT" ]
null
null
null
import graphgallery.nn.models.dgl as models
from graphgallery.data.sequence import FullBatchSequence
from graphgallery import functional as gf
from graphgallery.gallery.nodeclas import NodeClasTrainer
from graphgallery.gallery.nodeclas import DGL


@DGL.register()
class APPNP(NodeClasTrainer):
    """Implementation of approximated personalized propagation of neural
    predictions (APPNP).

    `Predict then Propagate: Graph Neural Networks meet Personalized PageRank
    <https://arxiv.org/abs/1810.05997>`
    Tensorflow 1.x implementation: <https://github.com/klicperajo/ppnp>
    Pytorch implementation: <https://github.com/klicperajo/ppnp>
    """

    def data_step(self,
                  adj_transform="normalize_adj",
                  feat_transform=None):
        graph = self.graph
        adj_matrix = gf.get(adj_transform)(graph.adj_matrix)
        attr_matrix = gf.get(feat_transform)(graph.attr_matrix)

        feat, g = gf.astensors(attr_matrix, adj_matrix, device=self.data_device)

        # ``g`` and ``feat`` are cached for later use
        self.register_cache(feat=feat, g=g)

    def model_step(self,
                   hids=[64],
                   acts=['relu'],
                   alpha=0.1,
                   K=10,
                   ppr_dropout=0.,
                   dropout=0.5,
                   bias=True):
        model = models.APPNP(self.graph.num_feats,
                             self.graph.num_classes,
                             hids=hids,
                             acts=acts,
                             alpha=alpha,
                             K=K,
                             ppr_dropout=ppr_dropout,
                             dropout=dropout,
                             bias=bias)
        return model

    def config_train_data(self, index):
        labels = self.graph.label[index]
        sequence = FullBatchSequence(inputs=[self.cache.feat, self.cache.g],
                                     y=labels,
                                     out_index=index,
                                     device=self.data_device,
                                     escape=type(self.cache.g))
        return sequence
37.683333
81
0.524547
import graphgallery.nn.models.dgl as models from graphgallery.data.sequence import FullBatchSequence from graphgallery import functional as gf from graphgallery.gallery.nodeclas import NodeClasTrainer from graphgallery.gallery.nodeclas import DGL @DGL.register() class APPNP(NodeClasTrainer): """Implementation of approximated personalized propagation of neural predictions (APPNP). `Predict then Propagate: Graph Neural Networks meet Personalized PageRank" <https://arxiv.org/abs/1810.05997>` Tensorflow 1.x implementation: <https://github.com/klicperajo/ppnp> Pytorch implementation: <https://github.com/klicperajo/ppnp> """ def data_step(self, adj_transform="normalize_adj", feat_transform=None): graph = self.graph adj_matrix = gf.get(adj_transform)(graph.adj_matrix) attr_matrix = gf.get(feat_transform)(graph.attr_matrix) feat, g = gf.astensors(attr_matrix, adj_matrix, device=self.data_device) # ``g`` and ``feat`` are cached for later use self.register_cache(feat=feat, g=g) def model_step(self, hids=[64], acts=['relu'], alpha=0.1, K=10, ppr_dropout=0., dropout=0.5, bias=True): model = models.APPNP(self.graph.num_feats, self.graph.num_classes, hids=hids, acts=acts, alpha=alpha, K=K, ppr_dropout=ppr_dropout, dropout=dropout, bias=bias) return model def config_train_data(self, index): labels = self.graph.label[index] sequence = FullBatchSequence(inputs=[self.cache.feat, self.cache.g], y=labels, out_index=index, device=self.data_device, escape=type(self.cache.g)) return sequence
0
0
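The trainer above delegates the actual propagation to models.APPNP; the K-step personalized-PageRank scheme that the paper (and the alpha/K arguments) refer to can be sketched in plain numpy as follows — A_hat is the normalized adjacency and H the MLP's predictions (an illustration, not the library's code):

import numpy as np

def appnp_propagate(A_hat, H, alpha=0.1, K=10):
    # Z interpolates between graph diffusion and the original predictions
    Z = H
    for _ in range(K):
        Z = (1 - alpha) * (A_hat @ Z) + alpha * H
    return Z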
8212a3f51dd0ff690f96c791a8239dc4e79430a6
2,225
py
Python
whale/src/choiceHeuristic.py
margaal/whale
00f0743a49e383319cec2d38883697774956ffc5
[ "MIT", "Unlicense" ]
null
null
null
whale/src/choiceHeuristic.py
margaal/whale
00f0743a49e383319cec2d38883697774956ffc5
[ "MIT", "Unlicense" ]
null
null
null
whale/src/choiceHeuristic.py
margaal/whale
00f0743a49e383319cec2d38883697774956ffc5
[ "MIT", "Unlicense" ]
null
null
null
#!/usr/bin/env python3

"""
This module contains all classes used to manage the variable choice heuristic
"""

class VariableChoiceHeuristic:
    """ Super class to handle variable choice heuristics """

    def __init__(self, vars):
        """
        Args:
            vars (set): variables used in all clauses.
        """
        #: set: All variables of the set of clauses that must be analyzed
        self.vars = vars

    def getVariableTriplet(self, S):
        """Method to get variable

        Args:
            S: assignment set
        Returns:
            a triplet (X, v, v') such that X is a variable, v is the value of X and v' is the alternative value of X
        """
        if len(S) == 0:
            return (min(self.vars), 1, -1)
        s = set(list(zip(*S))[0])
        return (min(self.vars - s), 1, -1)


class SimpleVariableChoiceHeuristic(VariableChoiceHeuristic):
    """ First approach to choosing a variable; it is simple:
    we choose the first variable which is not yet in the assignment set (S) """

    def __init__(self, vars):
        super().__init__(vars)

    def getVariableTriplet(self, S):
        """Method to get variable

        Args:
            S: assignment set
        Returns:
            a triplet (X, v, v') such that X is a variable, v is the value of X and v' is the alternative value of X
        """
        return super().getVariableTriplet(S)


class LevelTwoVariableChoiceHeuristic(VariableChoiceHeuristic):
    """ This approach to choosing a variable is better than SimpleVariableChoiceHeuristic
    because it considers unit clauses """

    def __init__(self, vars):
        super().__init__(vars)
        #: set: All unit-clause literals detected in the previous analysis of the system of clauses
        self.unitClauseLitteral: set = set()

    def getVariableTriplet(self, S):
        """Method to get variable

        Args:
            S (list): assignment set
        Returns:
            a set of tuples, i.e. triplets (X, v, v') such that X is a variable, v is the value of X and v' is the alternative value of X
        """
        if len(self.unitClauseLitteral) != 0:
            return self.unitClauseLitteral
        return super().getVariableTriplet(S)
33.712121
127
0.595056
#!/usr/bin/env python3 """ This module contains all class to manage variable choice Heuristic """ class VariableChoiceHeuristic: """ Super class to handle variable choice heuristic """ def __init__(self, vars): """ Args: vars (set): variables used in all clauses. """ #: set: All variables of a set of clauses program must be analyzed self.vars = vars def getVariabeTriplet(self, S): """Method to get variable Args: S: assignment set Returns: a triplet (X, v, v') such as X is variable, v is value of X and v' is alternative value of X """ if len(S) == 0: return (min(self.vars), 1, -1) s = set(list(zip(*S))[0]) return (min(self.vars-s), 1, -1) class SimpleVariableChoiceHeuristic(VariableChoiceHeuristic): """ First approach to choose variable, it is simple. we choose the first variable wich is not yet in assignment set (S) """ def __init__(self, vars): super().__init__(vars) def getVariableTriplet(self, S): """Method to get variable Args: S: assignment set Returns: a triplet (X, v, v') such as X is variable, v is value of X and v' is alternative value of X """ return super().getVariabeTriplet(S) class LevelTwoVariableChoiceHeuristic(VariableChoiceHeuristic): """ This approach to choose variable is better than SimpleVariableChoiceHeuristic because it considers unitary clause""" def __init__(self, vars): super().__init__(vars) #: set: All unitary clauses detected in the previous analysis of system of clauses self.unitClauseLitteral:set = set() def getVariableTriplet(self, S): """Method to get variable Args: S(list): assignment set Returns: a set of tuple, i.e a triplet (X, v, v') such as X is variable, v is value of X and v' is alternative value of X """ if len(self.unitClauseLitteral)!=0: return self.unitClauseLitteral return super().getVariabeTriplet(S)
0
0
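A small usage sketch of the heuristics above, under the file's own convention that the assignment set S is a list of (variable, value) pairs:

h = SimpleVariableChoiceHeuristic({1, 2, 3, 4})
print(h.getVariableTriplet([]))        # (1, 1, -1): smallest unassigned variable
print(h.getVariableTriplet([(1, 1)]))  # (2, 1, -1): variable 1 is already assigned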
4dafefa27f16be57f2e9da833baf8652ada74c41
139
py
Python
mayan/apps/common/forms.py
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
0e4e919fd2e1ded6711354a0330135283e87f8c7
[ "Apache-2.0" ]
2
2021-09-12T19:41:19.000Z
2021-09-12T19:41:20.000Z
mayan/apps/common/forms.py
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
0e4e919fd2e1ded6711354a0330135283e87f8c7
[ "Apache-2.0" ]
37
2021-09-13T01:00:12.000Z
2021-10-02T03:54:30.000Z
mayan/apps/common/forms.py
CMU-313/fall-2021-hw2-451-unavailable-for-legal-reasons
0e4e919fd2e1ded6711354a0330135283e87f8c7
[ "Apache-2.0" ]
1
2021-09-22T13:17:30.000Z
2021-09-22T13:17:30.000Z
from mayan.apps.views.forms import FileDisplayForm class LicenseForm(FileDisplayForm): DIRECTORY = () FILENAME = 'LICENSE'
19.857143
51
0.71223
from mayan.apps.views.forms import FileDisplayForm class LicenseForm(FileDisplayForm): DIRECTORY = () FILENAME = 'LICENSE'
0
0
c7967e7d52f4fe6e677ae72f08ece4b6c9454ea3
213
py
Python
18/2 - Fast Fibbonacci.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
18/2 - Fast Fibbonacci.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
18/2 - Fast Fibbonacci.py
Surferlul/csc-python-solutions
bea99e5e1e344d17fb2cb29d8bcbc6b108e24cee
[ "MIT" ]
null
null
null
def Fibonacci(n): sequence = [0, 1, 1] # Fibonacci(0) is 0, Fibonacci(1) and Fibonacci(2) are 1 for i in range(3, n+1): sequence.append(sequence[i-1] + sequence[i-2]) return sequence[n]
35.5
82
0.596244
def Fibonacci(n): sequence = [0, 1, 1] # Fibonacci(0) is 0, Fibonacci(1) and Fibonacci(2) are 1 for i in range(3, n+1): sequence.append(sequence[i-1] + sequence[i-2]) return sequence[n]
0
0
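The file is named "Fast Fibbonacci", but the version above is the linear, list-based one; for contrast, a fast-doubling sketch needs only O(log n) steps:

def fib_fast_doubling(n):
    """Return the pair (F(n), F(n+1)) using the doubling identities."""
    if n == 0:
        return (0, 1)
    a, b = fib_fast_doubling(n // 2)
    c = a * (2 * b - a)   # F(2k)   = F(k) * (2*F(k+1) - F(k))
    d = a * a + b * b     # F(2k+1) = F(k)^2 + F(k+1)^2
    if n % 2 == 0:
        return (c, d)
    return (d, c + d)

print(fib_fast_doubling(10)[0])  # 55, matching Fibonacci(10) above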
87e02e8e46132c97db5e7e96372e3d6b7ffa4c3f
21,615
py
Python
References/Original Docs/TrussCalc_v0.py
lorcan2440/SimpleTrussCalculator
3c063fbc5f1987e9a700e312b763b36d4ec22495
[ "MIT" ]
1
2021-07-29T14:34:08.000Z
2021-07-29T14:34:08.000Z
References/Original Docs/TrussCalc_v0.py
lorcan2440/SimpleTrussCalculator
3c063fbc5f1987e9a700e312b763b36d4ec22495
[ "MIT" ]
1
2021-07-30T17:34:42.000Z
2021-07-30T17:34:42.000Z
References/Original Docs/TrussCalc_v0.py
lorcan2440/SimpleTrussCalculator
3c063fbc5f1987e9a700e312b763b36d4ec22495
[ "MIT" ]
1
2022-03-13T11:01:34.000Z
2022-03-13T11:01:34.000Z
from matplotlib import pyplot as plt import math, sigfig, warnings # module "sigfig" requires "pip install sigfig" at command line import numpy as np def get_all_trusses(): return [truss for truss in Truss] class IterTruss(type): def __iter__(cls): return iter(cls._allTrusses) class Truss(metaclass = IterTruss): _allTrusses = [] # TRUSS METACLASS INITIATORS class IterJoint(type): def __iter__(cls): return iter(cls._allJoints) class IterBar(type): def __iter__(cls): return iter(cls._allBars) class IterLoad(type): def __iter__(cls): return iter(cls._allLoads) class IterSupport(type): def __iter__(cls): return iter(cls._allSupports) def __init__(self, bar_params: dict = None, units = 'kN, mm'): self._allTrusses.append(self) if bar_params == None: if units == 'N, m': self.default_params = {"b" : 0.016, "t" : 0.004, "D" : 0.020, "E" : 2.1e11} elif units == 'kN, mm': self.default_params = {"b" : 1.6, "t" : 4, "D" : 20, "E" : 210} else: raise ValueError('Units must be either "N, m" or "kN, mm".') else: self.default_params = bar_params self.units = units # PARTS OF THE TRUSS (INNER CLASSES) class Joint(metaclass = IterJoint): _allJoints = [] def __init__(self, truss: object, name: str, x: float, y: float): self._allJoints.append(self) self.name = name self.truss = truss self.x = x self.y = y self.loads = {} def form_equation(self): self.truss.get_all_bars_connected_to_joint(self) class Bar(metaclass = IterBar): _allBars = [] def __init__(self, truss: object, name: str, first_joint: object, second_joint: object, my_params: dict = None): self._allBars.append(self) self.name = name self.first_joint, self.first_joint_name = first_joint, first_joint.name self.second_joint, self.second_joint_name = second_joint, second_joint.name if my_params == None: self.params = truss.default_params else: self.params = my_params self.b, self.t, self.D, self.E, self._max = self.params["b"], self.params["t"], self.params["D"], self.params["E"], self.params["_max"] def length(self): self.L = math.sqrt((self.first_joint.x - self.second_joint.x)**2 + (self.first_joint.y - self.second_joint.y)**2) return self.L def area(self): self.A = (self.b ** 2 - (self.b - self.t) ** 2) * 1.03 def effective_area(self): self.A_eff = (1.5 * self.b - self.D) * 0.9 * self.t return self.A_eff def buckling_ratio(self): self.buckling_ratio = self.length() / self.b return self.buckling_ratio class Load(metaclass = IterLoad): _allLoads = [] def __init__(self, name: str, joint: object, x_comp: float = 0.0, y_comp: float = 0.0): self._allLoads.append(self) self.name = name self.joint = joint self.x, self.y = x_comp, y_comp self.magnitude = math.sqrt(self.x ** 2 + self.y ** 2) self.direction = math.atan2(self.y, self.x) joint.loads[self.name] = (self.x, self.y) class Support(metaclass = IterSupport): _allSupports = [] def __init__(self, truss: object, name: str, joint: object, support_type: str = 'encastre', roller_normal_vector: tuple = (1, 0)): self._allSupports.append(self) self.name = name self.joint = joint self.type = support_type self.dir = roller_normal_vector if self.type in ('encastre', 'pin'): joint.loads['Reaction @ {}'.format(self.name)] = (None, None) # Independent unknowns: fill in later elif self.type == 'roller': joint.loads['Reaction @ {}'.format(self.name)] = (None * self.dir[0], None * self.dir[1]) # Dependent unknowns: fill in later else: raise ValueError('Support type must be "encastre", "roller" or " pin".') # TRUSS METACLASS METHODS def get_all_bars(self, str_names_only: bool = False): if not str_names_only: return [bar for 
bar in Truss.Bar] else: return [bar.name for bar in Truss.Bar] def get_all_joints(self, str_names_only: bool = False): if not str_names_only: return [joint for joint in Truss.Joint] else: return [joint.name for joint in Truss.Joint] def get_all_bars_connected_to_joint(self, joint: object, str_names_only: bool = False): if not str_names_only: return [bar for bar in Truss.Bar if joint.name in (bar.first_joint.name, bar.second_joint.name)] else: return [bar.name for bar in Truss.Bar if joint.name in (bar.first_joint.name, bar.second_joint.name)] def get_all_joints_connected_to_bar(self, bar: object, str_names_only: bool = False): if not str_names_only: return [bar.first_joint, bar.second_joint] else: return [bar.first_joint.name, bar.second_joint.name] def get_all_loads(self): return [load for load in Truss.Load] def get_all_loads_at_joint(self, joint: object): return [load for load in Truss.Load if load.joint == joint] def get_all_loads_at_joint_by_name(self, joint_name: str): return [load for load in Truss.Load if load.joint.name == joint_name] def get_all_supports(self): return [support for support in Truss.Support] def get_bar_by_name(self, bar_name: str): for bar in Truss.Bar: if bar.name == bar_name: return bar def is_statically_determinate(self): b = len(self.get_all_bars(str_names_only = True)) F = sum([2 if support.type in ('encastre', 'pin') else 1 for support in Truss.Support]) j = len(self.get_all_joints(str_names_only = True)) return b + F == 2 * j def calculate(self): # Get a list of the distinct joint names, number of equations to form = 2 * number of joints joint_names = self.get_all_joints(str_names_only = True) number_of_unknowns = 2 * len(joint_names) # List of dictionaries for unknowns, given default zero values unknowns = {} wanted_vars = [] for bar in self.get_all_bars(): unknowns['Tension in ' + bar.name] = 0 wanted_vars.append('Tension in ' + bar.name) for support in self.get_all_supports(): unknowns['Horizontal reaction at ' + support.name] = 0 wanted_vars.append('Horizontal reaction at ' + support.joint.name) unknowns['Vertical reaction at ' + support.name] = 0 wanted_vars.append('Vertical reaction at ' + support.joint.name) unknowns = [unknowns for x in range(number_of_unknowns)] # Create a list of joint names, with each entry included twice and then flatten the list joint_enum = [[joint_names[i], joint_names[i]] for i in range(len(joint_names))] joint_enum = [item for sublist in joint_enum for item in sublist] # Create empty dictionary of all equations in all unknowns unknowns = {"Equation {}, resolve {} at {}".format( x + 1, 'horizontally' if (x + 1) % 2 == 1 else 'vertically', joint_enum[x]) : unknowns[x] for x in range(number_of_unknowns)} all_directions = {} for joint in self.get_all_joints(): # Reset the directions dictionary for this joint directions = {} connected_bars = self.get_all_bars_connected_to_joint(joint) # Get the anticlockwise (polar) angle of each connected joint relative to this joint which have bars for bar in connected_bars: connected_joints = self.get_all_joints_connected_to_bar(bar) if joint == connected_joints[0]: angle = math.atan2(connected_joints[1].y - joint.y, connected_joints[1].x - joint.x) elif joint == connected_joints[1]: angle = math.atan2(connected_joints[0].y - joint.y, connected_joints[0].x - joint.x) directions['Tension in ' + bar.name] = angle # If there are reactions at this joint, store their directions too if any([bool(s.joint.name == joint.name) for s in self.get_all_supports()]): directions['Horizontal reaction at ' + 
joint.name] = 0 directions['Vertical reaction at ' + joint.name] = math.pi/2 # If there are external loads at this joint, store their directions too for l in self.get_all_loads_at_joint(joint): directions['Horizontal component of {} at {}'.format(l.name , joint.name)] = 0 directions['Vertical component of {} at {}'.format(l.name , joint.name)] = math.pi/2 all_directions[joint.name] = directions # Store the coefficients of the unknowns in each equation coefficients = [] for joint_name in joint_names: current_line = [] for var in wanted_vars: try: current_line.append(round(math.cos(all_directions[joint_name][var]), 10)) except KeyError: current_line.append(0) coefficients.append(current_line) current_line = [] for var in wanted_vars: try: current_line.append(round(math.sin(all_directions[joint_name][var]), 10)) except KeyError: current_line.append(0) coefficients.append(current_line) # Store the constants of each equation constants = [] for joint_name in joint_names: try: constants.append([-1 * sum(L.x) for L in self.get_all_loads_at_joint_by_name(joint_name)]) constants.append([-1 * sum(L.y) for L in self.get_all_loads_at_joint_by_name(joint_name)]) except TypeError: constants.append([-1 * L.x for L in self.get_all_loads_at_joint_by_name(joint_name)]) constants.append([-1 * L.y for L in self.get_all_loads_at_joint_by_name(joint_name)]) # Sanitise load data for i in range(len(constants)): if constants[i] == [] or constants[i] == [None]: constants[i] = [0] # Solve the system M, B = np.matrix(np.array(coefficients)), np.matrix(constants) X = np.linalg.inv(M) * B # Match values back to variable names and return output_dict = {} i = 0 for bar in self.get_all_bars(): output_dict[bar.name] = float(X[i]) i += 1 for support in self.get_all_supports(): output_dict[support.name] = (float(X[i]), float(X[i+1])) i += 2 return output_dict # TRUSS RESULTS CLASS class Result: def __init__(self, truss, sig_figs = None): self.results = truss.calculate() self.tensions, self.reactions, self.stresses, self.strains, self.buckling_ratios = {}, {}, {}, {}, {} self.sig_figs = sig_figs warnings.filterwarnings('ignore') self.get_tensions(truss) self.get_reactions(truss) self.get_stresses(truss) self.get_buckling_ratios(truss) self.get_strains(truss) self.round_data() def round_data(self): for item in list(self.tensions.keys()): try: self.tensions[item] = sigfig.round(self.tensions[item], self.sig_figs) self.stresses[item] = sigfig.round(self.stresses[item], self.sig_figs) self.strains[item] = sigfig.round(self.strains[item], self.sig_figs) self.buckling_ratios[item] = sigfig.round(self.buckling_ratios[item], self.sig_figs) except KeyError: continue for item in list(self.reactions.keys()): try: self.reactions[item] = (sigfig.round(self.reactions[item][0], self.sig_figs), sigfig.round(self.reactions[item][1], self.sig_figs)) except KeyError: continue def get_tensions(self, truss): for item in self.results: if type(self.results[item]) == float: if abs(self.results[item]) < 1e-10: self.tensions.update({item : 0}) else: self.tensions.update({item : self.results[item]}) def get_reactions(self, truss): for item in self.results: if type(self.results[item]) == tuple: self.reactions.update({item : self.results[item]}) def get_stresses(self, truss): for item in self.results: if type(self.results[item]) == float: self.stresses.update({item : self.tensions[item] / truss.get_bar_by_name(item).effective_area()}) def get_strains(self, truss): for item in self.results: if type(self.results[item]) == float: self.strains.update({item : 
self.stresses[item] / truss.get_bar_by_name(item).E}) def get_buckling_ratios(self, truss): for item in self.results: if type(self.results[item]) == float and self.results[item] < 0: self.buckling_ratios.update({item : truss.get_bar_by_name(item).buckling_ratio()}) # TRUSS INNER CLASSES END HERE def print_results(results: object, truss: object, as_str: bool = True): if as_str: print('Axial forces are: (positive = tension; negative = compression) \n' + str(results.tensions)) print('\nAxial stresses are: \n' + str(results.stresses)) ''' print('\nReaction forces are (horizontal, vertical) components (signs consistent with coordinate system): \n' + str(results.reactions)) ''' print('Buckling ratios are: \n' + str(results.buckling_ratios)) print('Strains are: \n' + str(results.strains)) if results.sig_figs == None: print('\nUnits are {}, {}'.format(truss.units.split(',')[0], 'values not rounded')) else: print('\nUnits are {}, {}'.format(truss.units.split(',')[0], 'values rounded to {} sig figs'.format(results.sig_figs))) def plot_diagram(truss: object, results: object, show_reactions = False): # Find a suitable length-scale to make the annotations look nicer arrow_sizes = [x.length() for x in truss.get_all_bars()] arrow_sizes = sum(arrow_sizes)/len(arrow_sizes) * 0.1 # Plot all joints plt.plot([joint.x for joint in truss.get_all_joints()], [joint.y for joint in truss.get_all_joints()], 'o') # Plot all bars and label their axial forces in the legend for bar in truss.get_all_bars(): plt.plot([bar.first_joint.x, bar.second_joint.x], [bar.first_joint.y, bar.second_joint.y], label = '{}'.format(bar.name + ': ' + str(results.tensions[bar.name]) + ' ' + truss.units.split(',')[0]), zorder = 0) # If the bar is nearly vertical, label its name to its right, otherwise label it above if 80 * (math.pi / 180) <= abs(math.atan2(bar.second_joint.y - bar.first_joint.y, bar.second_joint.x - bar.first_joint.x)) <= 100 * (math.pi / 180): plt.text(sum([bar.first_joint.x, bar.second_joint.x])/2 + arrow_sizes / 3, sum([bar.first_joint.y, bar.second_joint.y])/2, bar.name) else: plt.text(sum([bar.first_joint.x, bar.second_joint.x])/2, sum([bar.first_joint.y, bar.second_joint.y])/2 + arrow_sizes / 3, bar.name) # Plot all support points with their reactions as arrows for support in truss.get_all_supports(): plt.plot(support.joint.x, support.joint.y, '*', color = 'red', label = support.name + ': ' + str(results.reactions[support.name]) + ' ' + truss.units.split(',')[0]) for support in truss.get_all_supports(): if show_reactions == True: direction_of_reaction = math.atan2(results.reactions[support.name][1], results.reactions[support.name][0]) plt.arrow(support.joint.x, support.joint.y, arrow_sizes, 0, head_width = arrow_sizes / 5, head_length = arrow_sizes / 4) plt.arrow(support.joint.x, support.joint.y, 0, arrow_sizes, head_width = arrow_sizes / 5, head_length = arrow_sizes / 4) plt.text(support.joint.x + arrow_sizes / 4, support.joint.y + arrow_sizes / 4, support.name, label = support.name + ': ' + str(results.reactions[support.name]) + ' ' + truss.units.split(',')[0]) # Plot all loads for load in truss.get_all_loads(): direction_of_load = math.atan2(load.y, load.x) plt.arrow(load.joint.x, load.joint.y, arrow_sizes * math.cos(direction_of_load), arrow_sizes * math.sin(direction_of_load), head_width = arrow_sizes / 5, head_length = arrow_sizes / 4) plt.text(sum([load.joint.x, load.joint.x + arrow_sizes * math.cos(direction_of_load)])/2 + arrow_sizes / 3, sum([load.joint.y + load.joint.y, arrow_sizes * 
math.sin(direction_of_load)])/2, load.name + ': (' + str(load.x) + ', ' + str(load.y) + ') ' + truss.units.split(',')[0]) # Graphical improvements plt.legend(loc = 'upper right') plt.autoscale(enable = True, axis = 'both') plt.axis('equal') plt.show() # MAIN FUNCTIONS END HERE def build_truss(x, print_res = True): # Step 0: set the physical properties and name the truss custom_params = {"b" : 12.5, "t" : 0.7, "D" : 5, "E" : 210, "_max": 0.216} myTruss = Truss(custom_params, 'kN, mm') # Step 1: Define the joints (nodes) joint_A = myTruss.Joint(myTruss, "Joint A", 0, 0) joint_B = myTruss.Joint(myTruss, "Joint B", 290, -90) joint_C = myTruss.Joint(myTruss, "Joint C", 815, 127.5) joint_D = myTruss.Joint(myTruss, "Joint D", 290, 345) joint_E = myTruss.Joint(myTruss, "Joint E", 0, 255) joint_F = myTruss.Joint(myTruss, "Joint F", 220.836, 127.5) weak = {"b" : 12.5, "t" : 0.7, "D" : 5, "E" : 210, "_max": 0.216} medium_1 = {"b" : 16, "t" : 0.9, "D" : 5, "E" : 210, "_max": 0.216} medium_2 = {"b" : 16, "t" : 1.1, "D" : 5, "E" : 210, "_max": 0.216} strong = {"b" : 19, "t" : 1.1, "D" : 5, "E" : 210, "_max": 0.216} # Step 2: Define the bars going between any pair of joints bar_1 = myTruss.Bar(myTruss, "Bar 1", joint_A, joint_B, medium_2) bar_2 = myTruss.Bar(myTruss, "Bar 2", joint_B, joint_C, strong) bar_3 = myTruss.Bar(myTruss, "Bar 3", joint_C, joint_D, medium_1) bar_4 = myTruss.Bar(myTruss, "Bar 4", joint_D, joint_E, medium_1) bar_5 = myTruss.Bar(myTruss, "Bar 5", joint_E, joint_F, medium_1) bar_6 = myTruss.Bar(myTruss, "Bar 6", joint_F, joint_A, medium_2) bar_7 = myTruss.Bar(myTruss, "Bar 7", joint_F, joint_D, medium_1) bar_8 = myTruss.Bar(myTruss, "Bar 8", joint_F, joint_B, weak) # Step 3: Define the loads acting on any joint load_1 = myTruss.Load("W", joint_C, 0, -0.675 * 2) # Step 4: Define the supports acting at any joint support_1 = myTruss.Support(myTruss, "Support 1", joint_A, 'encastre') support_2 = myTruss.Support(myTruss, "Support 2", joint_E, 'encastre') # Step 5: Calculate the truss and print the results my_results = myTruss.Result(myTruss, sig_figs = 3) if print_res == True: print_results(my_results, myTruss, as_str = True) if True: plot_diagram(myTruss, my_results) else: return my_results build_truss(815, True)
46.684665
152
0.556697
from matplotlib import pyplot as plt import math, sigfig, warnings # module "sigfig" requires "pip install sigfig" at command line import numpy as np def get_all_trusses(): return [truss for truss in Truss] class IterTruss(type): def __iter__(cls): return iter(cls._allTrusses) class Truss(metaclass = IterTruss): _allTrusses = [] # TRUSS METACLASS INITIATORS class IterJoint(type): def __iter__(cls): return iter(cls._allJoints) class IterBar(type): def __iter__(cls): return iter(cls._allBars) class IterLoad(type): def __iter__(cls): return iter(cls._allLoads) class IterSupport(type): def __iter__(cls): return iter(cls._allSupports) def __init__(self, bar_params: dict = None, units = 'kN, mm'): self._allTrusses.append(self) if bar_params == None: if units == 'N, m': self.default_params = {"b" : 0.016, "t" : 0.004, "D" : 0.020, "E" : 2.1e11} elif units == 'kN, mm': self.default_params = {"b" : 1.6, "t" : 4, "D" : 20, "E" : 210} else: raise ValueError('Units must be either "N, m" or "kN, mm".') else: self.default_params = bar_params self.units = units # PARTS OF THE TRUSS (INNER CLASSES) class Joint(metaclass = IterJoint): _allJoints = [] def __init__(self, truss: object, name: str, x: float, y: float): self._allJoints.append(self) self.name = name self.truss = truss self.x = x self.y = y self.loads = {} def form_equation(self): self.truss.get_all_bars_connected_to_joint(self) class Bar(metaclass = IterBar): _allBars = [] def __init__(self, truss: object, name: str, first_joint: object, second_joint: object, my_params: dict = None): self._allBars.append(self) self.name = name self.first_joint, self.first_joint_name = first_joint, first_joint.name self.second_joint, self.second_joint_name = second_joint, second_joint.name if my_params == None: self.params = truss.default_params else: self.params = my_params self.b, self.t, self.D, self.E, self.σ_max = self.params["b"], self.params["t"], self.params["D"], self.params["E"], self.params["σ_max"] def length(self): self.L = math.sqrt((self.first_joint.x - self.second_joint.x)**2 + (self.first_joint.y - self.second_joint.y)**2) return self.L def area(self): self.A = (self.b ** 2 - (self.b - self.t) ** 2) * 1.03 def effective_area(self): self.A_eff = (1.5 * self.b - self.D) * 0.9 * self.t return self.A_eff def buckling_ratio(self): self.buckling_ratio = self.length() / self.b return self.buckling_ratio class Load(metaclass = IterLoad): _allLoads = [] def __init__(self, name: str, joint: object, x_comp: float = 0.0, y_comp: float = 0.0): self._allLoads.append(self) self.name = name self.joint = joint self.x, self.y = x_comp, y_comp self.magnitude = math.sqrt(self.x ** 2 + self.y ** 2) self.direction = math.atan2(self.y, self.x) joint.loads[self.name] = (self.x, self.y) class Support(metaclass = IterSupport): _allSupports = [] def __init__(self, truss: object, name: str, joint: object, support_type: str = 'encastre', roller_normal_vector: tuple = (1, 0)): self._allSupports.append(self) self.name = name self.joint = joint self.type = support_type self.dir = roller_normal_vector if self.type in ('encastre', 'pin'): joint.loads['Reaction @ {}'.format(self.name)] = (None, None) # Independent unknowns: fill in later elif self.type == 'roller': joint.loads['Reaction @ {}'.format(self.name)] = (None * self.dir[0], None * self.dir[1]) # Dependent unknowns: fill in later else: raise ValueError('Support type must be "encastre", "roller" or " pin".') # TRUSS METACLASS METHODS def get_all_bars(self, str_names_only: bool = False): if not str_names_only: return [bar 
for bar in Truss.Bar] else: return [bar.name for bar in Truss.Bar] def get_all_joints(self, str_names_only: bool = False): if not str_names_only: return [joint for joint in Truss.Joint] else: return [joint.name for joint in Truss.Joint] def get_all_bars_connected_to_joint(self, joint: object, str_names_only: bool = False): if not str_names_only: return [bar for bar in Truss.Bar if joint.name in (bar.first_joint.name, bar.second_joint.name)] else: return [bar.name for bar in Truss.Bar if joint.name in (bar.first_joint.name, bar.second_joint.name)] def get_all_joints_connected_to_bar(self, bar: object, str_names_only: bool = False): if not str_names_only: return [bar.first_joint, bar.second_joint] else: return [bar.first_joint.name, bar.second_joint.name] def get_all_loads(self): return [load for load in Truss.Load] def get_all_loads_at_joint(self, joint: object): return [load for load in Truss.Load if load.joint == joint] def get_all_loads_at_joint_by_name(self, joint_name: str): return [load for load in Truss.Load if load.joint.name == joint_name] def get_all_supports(self): return [support for support in Truss.Support] def get_bar_by_name(self, bar_name: str): for bar in Truss.Bar: if bar.name == bar_name: return bar def is_statically_determinate(self): b = len(self.get_all_bars(str_names_only = True)) F = sum([2 if support.type in ('encastre', 'pin') else 1 for support in Truss.Support]) j = len(self.get_all_joints(str_names_only = True)) return b + F == 2 * j def calculate(self): # Get a list of the distinct joint names, number of equations to form = 2 * number of joints joint_names = self.get_all_joints(str_names_only = True) number_of_unknowns = 2 * len(joint_names) # List of dictionaries for unknowns, given default zero values unknowns = {} wanted_vars = [] for bar in self.get_all_bars(): unknowns['Tension in ' + bar.name] = 0 wanted_vars.append('Tension in ' + bar.name) for support in self.get_all_supports(): unknowns['Horizontal reaction at ' + support.name] = 0 wanted_vars.append('Horizontal reaction at ' + support.joint.name) unknowns['Vertical reaction at ' + support.name] = 0 wanted_vars.append('Vertical reaction at ' + support.joint.name) unknowns = [unknowns for x in range(number_of_unknowns)] # Create a list of joint names, with each entry included twice and then flatten the list joint_enum = [[joint_names[i], joint_names[i]] for i in range(len(joint_names))] joint_enum = [item for sublist in joint_enum for item in sublist] # Create empty dictionary of all equations in all unknowns unknowns = {"Equation {}, resolve {} at {}".format( x + 1, 'horizontally' if (x + 1) % 2 == 1 else 'vertically', joint_enum[x]) : unknowns[x] for x in range(number_of_unknowns)} all_directions = {} for joint in self.get_all_joints(): # Reset the directions dictionary for this joint directions = {} connected_bars = self.get_all_bars_connected_to_joint(joint) # Get the anticlockwise (polar) angle of each connected joint relative to this joint which have bars for bar in connected_bars: connected_joints = self.get_all_joints_connected_to_bar(bar) if joint == connected_joints[0]: angle = math.atan2(connected_joints[1].y - joint.y, connected_joints[1].x - joint.x) elif joint == connected_joints[1]: angle = math.atan2(connected_joints[0].y - joint.y, connected_joints[0].x - joint.x) directions['Tension in ' + bar.name] = angle # If there are reactions at this joint, store their directions too if any([bool(s.joint.name == joint.name) for s in self.get_all_supports()]): directions['Horizontal reaction at ' 
+ joint.name] = 0 directions['Vertical reaction at ' + joint.name] = math.pi/2 # If there are external loads at this joint, store their directions too for l in self.get_all_loads_at_joint(joint): directions['Horizontal component of {} at {}'.format(l.name , joint.name)] = 0 directions['Vertical component of {} at {}'.format(l.name , joint.name)] = math.pi/2 all_directions[joint.name] = directions # Store the coefficients of the unknowns in each equation coefficients = [] for joint_name in joint_names: current_line = [] for var in wanted_vars: try: current_line.append(round(math.cos(all_directions[joint_name][var]), 10)) except KeyError: current_line.append(0) coefficients.append(current_line) current_line = [] for var in wanted_vars: try: current_line.append(round(math.sin(all_directions[joint_name][var]), 10)) except KeyError: current_line.append(0) coefficients.append(current_line) # Store the constants of each equation constants = [] for joint_name in joint_names: try: constants.append([-1 * sum(L.x) for L in self.get_all_loads_at_joint_by_name(joint_name)]) constants.append([-1 * sum(L.y) for L in self.get_all_loads_at_joint_by_name(joint_name)]) except TypeError: constants.append([-1 * L.x for L in self.get_all_loads_at_joint_by_name(joint_name)]) constants.append([-1 * L.y for L in self.get_all_loads_at_joint_by_name(joint_name)]) # Sanitise load data for i in range(len(constants)): if constants[i] == [] or constants[i] == [None]: constants[i] = [0] # Solve the system M, B = np.matrix(np.array(coefficients)), np.matrix(constants) X = np.linalg.inv(M) * B # Match values back to variable names and return output_dict = {} i = 0 for bar in self.get_all_bars(): output_dict[bar.name] = float(X[i]) i += 1 for support in self.get_all_supports(): output_dict[support.name] = (float(X[i]), float(X[i+1])) i += 2 return output_dict # TRUSS RESULTS CLASS class Result: def __init__(self, truss, sig_figs = None): self.results = truss.calculate() self.tensions, self.reactions, self.stresses, self.strains, self.buckling_ratios = {}, {}, {}, {}, {} self.sig_figs = sig_figs warnings.filterwarnings('ignore') self.get_tensions(truss) self.get_reactions(truss) self.get_stresses(truss) self.get_buckling_ratios(truss) self.get_strains(truss) self.round_data() def round_data(self): for item in list(self.tensions.keys()): try: self.tensions[item] = sigfig.round(self.tensions[item], self.sig_figs) self.stresses[item] = sigfig.round(self.stresses[item], self.sig_figs) self.strains[item] = sigfig.round(self.strains[item], self.sig_figs) self.buckling_ratios[item] = sigfig.round(self.buckling_ratios[item], self.sig_figs) except KeyError: continue for item in list(self.reactions.keys()): try: self.reactions[item] = (sigfig.round(self.reactions[item][0], self.sig_figs), sigfig.round(self.reactions[item][1], self.sig_figs)) except KeyError: continue def get_tensions(self, truss): for item in self.results: if type(self.results[item]) == float: if abs(self.results[item]) < 1e-10: self.tensions.update({item : 0}) else: self.tensions.update({item : self.results[item]}) def get_reactions(self, truss): for item in self.results: if type(self.results[item]) == tuple: self.reactions.update({item : self.results[item]}) def get_stresses(self, truss): for item in self.results: if type(self.results[item]) == float: self.stresses.update({item : self.tensions[item] / truss.get_bar_by_name(item).effective_area()}) def get_strains(self, truss): for item in self.results: if type(self.results[item]) == float: self.strains.update({item 
: self.stresses[item] / truss.get_bar_by_name(item).E}) def get_buckling_ratios(self, truss): for item in self.results: if type(self.results[item]) == float and self.results[item] < 0: self.buckling_ratios.update({item : truss.get_bar_by_name(item).buckling_ratio()}) # TRUSS INNER CLASSES END HERE def print_results(results: object, truss: object, as_str: bool = True): if as_str: print('Axial forces are: (positive = tension; negative = compression) \n' + str(results.tensions)) print('\nAxial stresses are: \n' + str(results.stresses)) ''' print('\nReaction forces are (horizontal, vertical) components (signs consistent with coordinate system): \n' + str(results.reactions)) ''' print('Buckling ratios are: \n' + str(results.buckling_ratios)) print('Strains are: \n' + str(results.strains)) if results.sig_figs == None: print('\nUnits are {}, {}'.format(truss.units.split(',')[0], 'values not rounded')) else: print('\nUnits are {}, {}'.format(truss.units.split(',')[0], 'values rounded to {} sig figs'.format(results.sig_figs))) def plot_diagram(truss: object, results: object, show_reactions = False): # Find a suitable length-scale to make the annotations look nicer arrow_sizes = [x.length() for x in truss.get_all_bars()] arrow_sizes = sum(arrow_sizes)/len(arrow_sizes) * 0.1 # Plot all joints plt.plot([joint.x for joint in truss.get_all_joints()], [joint.y for joint in truss.get_all_joints()], 'o') # Plot all bars and label their axial forces in the legend for bar in truss.get_all_bars(): plt.plot([bar.first_joint.x, bar.second_joint.x], [bar.first_joint.y, bar.second_joint.y], label = '{}'.format(bar.name + ': ' + str(results.tensions[bar.name]) + ' ' + truss.units.split(',')[0]), zorder = 0) # If the bar is nearly vertical, label its name to its right, otherwise label it above if 80 * (math.pi / 180) <= abs(math.atan2(bar.second_joint.y - bar.first_joint.y, bar.second_joint.x - bar.first_joint.x)) <= 100 * (math.pi / 180): plt.text(sum([bar.first_joint.x, bar.second_joint.x])/2 + arrow_sizes / 3, sum([bar.first_joint.y, bar.second_joint.y])/2, bar.name) else: plt.text(sum([bar.first_joint.x, bar.second_joint.x])/2, sum([bar.first_joint.y, bar.second_joint.y])/2 + arrow_sizes / 3, bar.name) # Plot all support points with their reactions as arrows for support in truss.get_all_supports(): plt.plot(support.joint.x, support.joint.y, '*', color = 'red', label = support.name + ': ' + str(results.reactions[support.name]) + ' ' + truss.units.split(',')[0]) for support in truss.get_all_supports(): if show_reactions == True: direction_of_reaction = math.atan2(results.reactions[support.name][1], results.reactions[support.name][0]) plt.arrow(support.joint.x, support.joint.y, arrow_sizes, 0, head_width = arrow_sizes / 5, head_length = arrow_sizes / 4) plt.arrow(support.joint.x, support.joint.y, 0, arrow_sizes, head_width = arrow_sizes / 5, head_length = arrow_sizes / 4) plt.text(support.joint.x + arrow_sizes / 4, support.joint.y + arrow_sizes / 4, support.name, label = support.name + ': ' + str(results.reactions[support.name]) + ' ' + truss.units.split(',')[0]) # Plot all loads for load in truss.get_all_loads(): direction_of_load = math.atan2(load.y, load.x) plt.arrow(load.joint.x, load.joint.y, arrow_sizes * math.cos(direction_of_load), arrow_sizes * math.sin(direction_of_load), head_width = arrow_sizes / 5, head_length = arrow_sizes / 4) plt.text(sum([load.joint.x, load.joint.x + arrow_sizes * math.cos(direction_of_load)])/2 + arrow_sizes / 3, sum([load.joint.y + load.joint.y, arrow_sizes * 
math.sin(direction_of_load)])/2, load.name + ': (' + str(load.x) + ', ' + str(load.y) + ') ' + truss.units.split(',')[0]) # Graphical improvements plt.legend(loc = 'upper right') plt.autoscale(enable = True, axis = 'both') plt.axis('equal') plt.show() # MAIN FUNCTIONS END HERE def build_truss(x, print_res = True): # Step 0: set the physical properties and name the truss custom_params = {"b" : 12.5, "t" : 0.7, "D" : 5, "E" : 210, "σ_max": 0.216} myTruss = Truss(custom_params, 'kN, mm') # Step 1: Define the joints (nodes) joint_A = myTruss.Joint(myTruss, "Joint A", 0, 0) joint_B = myTruss.Joint(myTruss, "Joint B", 290, -90) joint_C = myTruss.Joint(myTruss, "Joint C", 815, 127.5) joint_D = myTruss.Joint(myTruss, "Joint D", 290, 345) joint_E = myTruss.Joint(myTruss, "Joint E", 0, 255) joint_F = myTruss.Joint(myTruss, "Joint F", 220.836, 127.5) weak = {"b" : 12.5, "t" : 0.7, "D" : 5, "E" : 210, "σ_max": 0.216} medium_1 = {"b" : 16, "t" : 0.9, "D" : 5, "E" : 210, "σ_max": 0.216} medium_2 = {"b" : 16, "t" : 1.1, "D" : 5, "E" : 210, "σ_max": 0.216} strong = {"b" : 19, "t" : 1.1, "D" : 5, "E" : 210, "σ_max": 0.216} # Step 2: Define the bars going between any pair of joints bar_1 = myTruss.Bar(myTruss, "Bar 1", joint_A, joint_B, medium_2) bar_2 = myTruss.Bar(myTruss, "Bar 2", joint_B, joint_C, strong) bar_3 = myTruss.Bar(myTruss, "Bar 3", joint_C, joint_D, medium_1) bar_4 = myTruss.Bar(myTruss, "Bar 4", joint_D, joint_E, medium_1) bar_5 = myTruss.Bar(myTruss, "Bar 5", joint_E, joint_F, medium_1) bar_6 = myTruss.Bar(myTruss, "Bar 6", joint_F, joint_A, medium_2) bar_7 = myTruss.Bar(myTruss, "Bar 7", joint_F, joint_D, medium_1) bar_8 = myTruss.Bar(myTruss, "Bar 8", joint_F, joint_B, weak) # Step 3: Define the loads acting on any joint load_1 = myTruss.Load("W", joint_C, 0, -0.675 * 2) # Step 4: Define the supports acting at any joint support_1 = myTruss.Support(myTruss, "Support 1", joint_A, 'encastre') support_2 = myTruss.Support(myTruss, "Support 2", joint_E, 'encastre') # Step 5: Calculate the truss and print the results my_results = myTruss.Result(myTruss, sig_figs = 3) if print_res == True: print_results(my_results, myTruss, as_str = True) if True: plot_diagram(myTruss, my_results) else: return my_results build_truss(815, True)
14
0
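One pitfall worth flagging in the truss code above: Bar.buckling_ratio() rebinds self.buckling_ratio to a float, so the float shadows the method after the first call and a second call raises TypeError. A minimal sketch of the safer pattern, using a simplified stand-in Bar rather than the class above:

class Bar:
    def __init__(self, length, b):
        self.L = length  # member length
        self.b = b       # section width

    @property
    def buckling_ratio(self):
        # computed on demand; nothing rebinds the attribute name
        return self.L / self.b

bar = Bar(length=500.0, b=16.0)
print(bar.buckling_ratio)  # 31.25, and it still works on every later access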
0e2883fc0a40eaacd3d346a142bc2ccbdbc6a50c
4,477
py
Python
src/dual_gazebo/src/control_key_drive.py
diddytpq/Dual-Motion-robot-gazebo
19d8098f9931ee7ded91f8242efdc176c418db8c
[ "MIT" ]
null
null
null
src/dual_gazebo/src/control_key_drive.py
diddytpq/Dual-Motion-robot-gazebo
19d8098f9931ee7ded91f8242efdc176c418db8c
[ "MIT" ]
null
null
null
src/dual_gazebo/src/control_key_drive.py
diddytpq/Dual-Motion-robot-gazebo
19d8098f9931ee7ded91f8242efdc176c418db8c
[ "MIT" ]
null
null
null
import rospy
import numpy as np
from std_msgs.msg import Float64
from gazebo_msgs.srv import *
from geometry_msgs.msg import *
import sys, select, os
import roslib

if os.name == 'nt':
    import msvcrt
else:
    import tty, termios

roslib.load_manifest('dual_gazebo')


def qua2eular(x, y, z, w):
    q_x = x
    q_y = y
    q_z = z
    q_w = w

    t0 = +2.0 * (q_w * q_x + q_y * q_z)
    t1 = +1.0 - 2.0 * (q_x * q_x + q_y * q_y)
    roll_x = np.arctan2(t0, t1)

    t2 = +2.0 * (q_w * q_y - q_z * q_x)
    t2 = +1.0 if t2 > +1.0 else t2
    t2 = -1.0 if t2 < -1.0 else t2
    pitch_y = np.arcsin(t2)

    t3 = +2.0 * (q_w * q_z + q_x * q_y)
    t4 = +1.0 - 2.0 * (q_y * q_y + q_z * q_z)
    yaw_z = np.arctan2(t3, t4)

    return roll_x, pitch_y, yaw_z  # in radians


def getKey():
    if os.name == 'nt':
        if sys.version_info[0] >= 3:
            return msvcrt.getch().decode()
        else:
            return msvcrt.getch()

    tty.setraw(sys.stdin.fileno())
    rlist, _, _ = select.select([sys.stdin], [], [], 0.1)
    if rlist:
        key = sys.stdin.read(1)
    else:
        key = ''

    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
    return key


def check_velocity(cur_vel):
    max_x = 5.5    # km/h
    max_y = 3.3    # km/h
    max_wz = 3.5   # deg/sec

    x_vel, y_vel, z_vel, z_angle = cur_vel

    if max_x < abs(x_vel):
        if x_vel > 0:
            x_vel = max_x
        else:
            x_vel = -max_x

    if max_y < abs(y_vel):
        if y_vel > 0:
            y_vel = max_y
        else:
            y_vel = -max_y

    if max_wz < abs(z_angle):
        if z_angle > 0:
            z_angle = max_wz
        else:
            z_angle = -max_wz

    return [x_vel, y_vel, z_vel], z_angle


def mecanum_wheel_velocity(vx, vy, wz):
    r = 0.0762     # radius of wheel
    l = 0.23       # length between {b} and wheel
    w = 0.25225    # depth between {b} and wheel
    alpha = l + w

    q_dot = np.array([wz, vx, vy])
    J_pseudo = np.array([[-alpha, 1, -1],
                         [alpha, 1, 1],
                         [alpha, 1, -1],
                         [alpha, 1, 1]])
    u = 1 / r * J_pseudo @ np.reshape(q_dot, (3, 1))  # q_dot.T
    return u


def move_mecanum(data):
    # start publisher of cmd_vel to control mecanum
    linear, angular = data
    pub = rospy.Publisher("/cmd_vel", Twist, queue_size=10)

    twist = Twist()
    twist.linear.x = linear[0]
    twist.linear.y = linear[1]
    twist.linear.z = linear[2]
    twist.angular.x = angular[0]
    twist.angular.y = angular[1]
    twist.angular.z = angular[2]

    pub.publish(twist)
    print(twist)
    return [linear[0], linear[1], linear[2]], angular[2]


def move_chassis(data):
    #pub_1 = rospy.Publisher('/link_chassis_vel', Twist, queue_size=10)
    pub_1 = rospy.Publisher('/dual_motion_robot/chassis_pos_joint_controller/command', Float64, queue_size=10)
    #pub_WL = rospy.Publisher('/kitech_robot/mp_left_wheel_joint_controller/command', Float64, queue_size=10)
    #pub_WR = rospy.Publisher('/kitech_robot/mp_right_wheel_joint_controller/command', Float64, queue_size=10)
    #pub_WL.publish(data)
    #pub_WR.publish(data)
    pub_1.publish(data)
    print(data)


if __name__ == '__main__':
    try:
        rospy.init_node('mecanum_key')
        if os.name != 'nt':
            settings = termios.tcgetattr(sys.stdin)

        linear = [0, 0, 0]
        angular = [0, 0, 0]
        plant_x = 0

        while True:
            key = getKey()

            if key == 'w':
                linear[0] += 1
                linear, angular[2] = move_mecanum([linear, angular])
            elif key == 'x':
                linear[0] -= 1
                linear, angular[2] = move_mecanum([linear, angular])
            elif key == 'a':
                angular[2] += 0.5
                linear, angular[2] = move_mecanum([linear, angular])
            elif key == 'd':
                angular[2] -= 0.5
                linear, angular[2] = move_mecanum([linear, angular])
            elif key == 'q':
                plant_x += 0.01
                move_chassis(plant_x)
            elif key == 'e':
                plant_x -= 0.01
                move_chassis(plant_x)
            elif key == 's':
                linear = [0, 0, 0]
                angular = [0, 0, 0]
                linear, angular[2] = move_mecanum([linear, angular])

            if key == '\x03':
                linear = [0, 0, 0]
                angular = [0, 0, 0]
                linear, angular[2] = move_mecanum([linear, angular])
                break

    except rospy.ROSInterruptException:
        pass
22.725888
110
0.542104
import rospy import numpy as np from std_msgs.msg import Float64 from gazebo_msgs.srv import * from geometry_msgs.msg import * import sys, select, os import roslib if os.name == 'nt': import msvcrt else: import tty, termios roslib.load_manifest('dual_gazebo') def qua2eular(x,y,z,w): q_x = x q_y = y q_z = z q_w = w t0 = +2.0 * (q_w * q_x + q_y * q_z) t1 = +1.0 - 2.0 * (q_x * q_x + q_y * q_y) roll_x = np.arctan2(t0, t1) t2 = +2.0 * (q_w * q_y - q_z * q_x) t2 = +1.0 if t2 > +1.0 else t2 t2 = -1.0 if t2 < -1.0 else t2 pitch_y = np.arcsin(t2) t3 = +2.0 * (q_w * q_z + q_x * q_y) t4 = +1.0 - 2.0 * (q_y * q_y + q_z * q_z) yaw_z = np.arctan2(t3, t4) return roll_x, pitch_y, yaw_z # in radians def getKey(): if os.name == 'nt': if sys.version_info[0] >= 3: return msvcrt.getch().decode() else: return msvcrt.getch() tty.setraw(sys.stdin.fileno()) rlist, _, _ = select.select([sys.stdin], [], [], 0.1) if rlist: key = sys.stdin.read(1) else: key = '' termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings) return key def check_velocity(cur_vel): max_x = 5.5 #km/h max_y = 3.3 #km/h max_wz = 3.5 #deg/sec x_vel, y_vel, z_vel, z_angle = cur_vel if max_x < abs(x_vel): if x_vel > 0: x_vel = max_x else: x_vel = -max_x if max_y < abs(y_vel): if y_vel > 0: y_vel = max_y else: y_vel = -max_y if max_wz < abs(z_angle): if z_angle > 0: z_angle = max_wz else: z_angle = -max_wz return [x_vel, y_vel, z_vel], z_angle def mecanum_wheel_velocity(vx, vy, wz): r = 0.0762 # radius of wheel l = 0.23 #length between {b} and wheel w = 0.25225 #depth between {b} abd wheel alpha = l + w q_dot = np.array([wz, vx, vy]) J_pseudo = np.array([[-alpha, 1, -1],[alpha, 1, 1],[alpha, 1, -1],[alpha, 1,1]]) u = 1/r * J_pseudo @ np.reshape(q_dot,(3,1))#q_dot.T return u def move_mecanum(data): # start publisher of cmd_vel to control mecanum linear, angular = data pub = rospy.Publisher("/cmd_vel", Twist, queue_size=10) twist = Twist() twist.linear.x = linear[0] twist.linear.y = linear[1] twist.linear.z = linear[2] twist.angular.x = angular[0] twist.angular.y = angular[1] twist.angular.z = angular[2] pub.publish(twist) print(twist) return [linear[0],linear[1],linear[2]], angular[2] def move_chassis(data): #pub_1 = rospy.Publisher('/link_chassis_vel', Twist,queue_size=10) pub_1 = rospy.Publisher('/dual_motion_robot/chassis_pos_joint_controller/command', Float64, queue_size=10) #pub_WL = rospy.Publisher('/kitech_robot/mp_left_wheel_joint_controller/command', Float64, queue_size=10) #pub_WR = rospy.Publisher('/kitech_robot/mp_right_wheel_joint_controller/command', Float64, queue_size=10) #pub_WL.publish(data) #pub_WR.publish(data) pub_1.publish(data) print(data) if __name__ == '__main__': try: rospy.init_node('mecanum_key') if os.name != 'nt': settings = termios.tcgetattr(sys.stdin) linear = [0, 0, 0] angular = [0, 0, 0] plant_x = 0 while(1): key = getKey() if key == 'w' : linear[0] += 1 linear, angular[2] = move_mecanum([linear,angular]) elif key == 'x' : linear[0] -= 1 linear, angular[2] = move_mecanum([linear,angular]) elif key == 'a' : angular[2] += 0.5 linear, angular[2] = move_mecanum([linear,angular]) elif key == 'd' : angular[2] -= 0.5 linear, angular[2] = move_mecanum([linear,angular]) elif key == 'q' : plant_x += 0.01 move_chassis(plant_x) elif key == 'e' : plant_x -= 0.01 move_chassis(plant_x) elif key == 's' : linear = [0, 0, 0] angular = [0, 0, 0] linear, angular[2] = move_mecanum([linear,angular]) if (key == '\x03'): linear = [0, 0, 0] angular = [0, 0, 0] linear, angular[2] = move_mecanum([linear,angular]) break except rospy.ROSInt: 
pass
0
0
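A quick sanity check of mecanum_wheel_velocity() from the script above: pure forward motion should command all four wheels equally (the input values here are only illustrative):

u = mecanum_wheel_velocity(vx=0.5, vy=0.0, wz=0.0)
print(u.ravel())  # four equal wheel speeds, 0.5 / 0.0762 ≈ 6.56 rad/s each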
316b321b9f7d046b9a1c73717ede0ee14f70b07e
3,151
py
Python
datalad/crawler/pipelines/tests/test_fcptable.py
yarikoptic/datalad
c0cd538de2ed9a30c0f58256c7afa6e18d325505
[ "MIT" ]
null
null
null
datalad/crawler/pipelines/tests/test_fcptable.py
yarikoptic/datalad
c0cd538de2ed9a30c0f58256c7afa6e18d325505
[ "MIT" ]
6
2015-11-20T21:41:13.000Z
2018-06-12T14:27:32.000Z
datalad/crawler/pipelines/tests/test_fcptable.py
yarikoptic/datalad
c0cd538de2ed9a30c0f58256c7afa6e18d325505
[ "MIT" ]
1
2017-03-28T14:44:16.000Z
2017-03-28T14:44:16.000Z
# emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*-
# ex: set sts=4 ts=4 sw=4 noet:
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ###  ##
#
#   See COPYING file distributed along with the datalad package for the
#   copyright and license terms.
#
# ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ###  ##

from os.path import exists
from requests.exceptions import InvalidURL

from ....utils import chpwd
from ....dochelpers import exc_str
from ....tests.utils import assert_true, assert_raises, assert_false
from ....tests.utils import SkipTest
from ....tests.utils import with_tempfile, skip_if_no_network, use_cassette
from ....tests.utils import skip_if_url_is_not_available
from datalad.crawler.pipelines.tests.utils import _test_smoke_pipelines
from datalad.crawler.pipelines.fcptable import *
from datalad.crawler.pipeline import run_pipeline

import logging
from logging import getLogger
lgr = getLogger('datalad.crawl.tests')

from ..fcptable import pipeline, superdataset_pipeline

TOPURL = "http://fcon_1000.projects.nitrc.org/fcpClassic/FcpTable.html"


def test_smoke_pipelines():
    yield _test_smoke_pipelines, pipeline, ['bogus']
    yield _test_smoke_pipelines, superdataset_pipeline, []


@use_cassette('test_fcptable_dataset')
@skip_if_no_network
@with_tempfile(mkdir=True)
def _test_dataset(dataset, error, create, skip, tmpdir):
    with chpwd(tmpdir):
        if create:
            with open("README.txt", 'w') as f:
                f.write(" ")

        pipe = [
            crawl_url(TOPURL),
            [
                assign({'dataset': dataset}),
                skip_if({'dataset': 'Cleveland CCF|Durham_Madden|NewYork_Test-Retest_Reliability'}, re=True),
                sub({'response': {'<div class="tableParam">([^<]*)</div>': r'\1'}}),
                find_dataset(dataset),
                extract_readme,
            ]
        ]

        if error:
            assert_raises((InvalidURL, RuntimeError), run_pipeline, pipe)
            return

        try:
            run_pipeline(pipe)
        except InvalidURL as exc:
            raise SkipTest(
                "This version of requests considers %s to be invalid.  "
                "See https://github.com/kennethreitz/requests/issues/3683#issuecomment-261947670 : %s"
                % (TOPURL, exc_str(exc)))

        if skip:
            assert_false(exists("README.txt"))
            return
        assert_true(exists("README.txt"))

        with open("README.txt", 'r') as f:
            contents = f.read()
        assert_true("Author(s)" in contents and "Details" in contents)


def test_dataset():
    raise SkipTest('Bring back when NITRC is back (gh-1472)')
    skip_if_url_is_not_available(TOPURL, regex='service provider outage')

    yield _test_dataset, 'Baltimore', None, False, False
    yield _test_dataset, 'AnnArbor_b', None, False, False
    yield _test_dataset, 'Ontario', None, False, False
    yield _test_dataset, 'Boston', RuntimeError, False, False
    yield _test_dataset, "AnnArbor_b", None, True, False
    yield _test_dataset, "Cleveland CCF", None, False, True
34.25
109
0.628689
# emacs: -*- mode: python; py-indent-offset: 4; tab-width: 4; indent-tabs-mode: nil -*- # ex: set sts=4 ts=4 sw=4 noet: # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## # # See COPYING file distributed along with the datalad package for the # copyright and license terms. # # ## ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## from os.path import exists from requests.exceptions import InvalidURL from ....utils import chpwd from ....dochelpers import exc_str from ....tests.utils import assert_true, assert_raises, assert_false from ....tests.utils import SkipTest from ....tests.utils import with_tempfile, skip_if_no_network, use_cassette from ....tests.utils import skip_if_url_is_not_available from datalad.crawler.pipelines.tests.utils import _test_smoke_pipelines from datalad.crawler.pipelines.fcptable import * from datalad.crawler.pipeline import run_pipeline import logging from logging import getLogger lgr = getLogger('datalad.crawl.tests') from ..fcptable import pipeline, superdataset_pipeline TOPURL = "http://fcon_1000.projects.nitrc.org/fcpClassic/FcpTable.html" def test_smoke_pipelines(): yield _test_smoke_pipelines, pipeline, ['bogus'] yield _test_smoke_pipelines, superdataset_pipeline, [] @use_cassette('test_fcptable_dataset') @skip_if_no_network @with_tempfile(mkdir=True) def _test_dataset(dataset, error, create, skip, tmpdir): with chpwd(tmpdir): if create: with open("README.txt", 'w') as f: f.write(" ") pipe = [ crawl_url(TOPURL), [ assign({'dataset': dataset}), skip_if({'dataset': 'Cleveland CCF|Durham_Madden|NewYork_Test-Retest_Reliability'}, re=True), sub({'response': {'<div class="tableParam">([^<]*)</div>': r'\1'}}), find_dataset(dataset), extract_readme, ] ] if error: assert_raises((InvalidURL, RuntimeError), run_pipeline, pipe) return try: run_pipeline(pipe) except InvalidURL as exc: raise SkipTest( "This version of requests considers %s to be invalid. " "See https://github.com/kennethreitz/requests/issues/3683#issuecomment-261947670 : %s" % (TOPURL, exc_str(exc))) if skip: assert_false(exists("README.txt")) return assert_true(exists("README.txt")) f = open("README.txt", 'r') contents = f.read() assert_true("Author(s)" and "Details" in contents) def test_dataset(): raise SkipTest('Bring back when NITRC is back (gh-1472)') skip_if_url_is_not_available(TOPURL, regex='service provider outage') yield _test_dataset, 'Baltimore', None, False, False yield _test_dataset, 'AnnArbor_b', None, False, False yield _test_dataset, 'Ontario', None, False, False yield _test_dataset, 'Boston', RuntimeError, False, False yield _test_dataset, "AnnArbor_b", None, True, False yield _test_dataset, "Cleveland CCF", None, False, True
0
0
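The assertion fixed above is a common pitfall worth a two-line illustration: "Author(s)" and "Details" in s parses as "Author(s)" and ("Details" in s), so the first substring is never actually tested:

s = "Details only"
print("Author(s)" and "Details" in s)       # True  -- "Author(s)" is merely truthy
print("Author(s)" in s and "Details" in s)  # False -- the intended check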
2adfa5603510b5c0ce9f049510e9539047f88898
1,683
py
Python
KuldeepDwivedi_A2305218477/BFS/Water_jug.py
suraj0803/AI-LAB-WORK
c09776c104529678215d4f51a756ea0039a89df4
[ "Apache-2.0" ]
null
null
null
KuldeepDwivedi_A2305218477/BFS/Water_jug.py
suraj0803/AI-LAB-WORK
c09776c104529678215d4f51a756ea0039a89df4
[ "Apache-2.0" ]
null
null
null
KuldeepDwivedi_A2305218477/BFS/Water_jug.py
suraj0803/AI-LAB-WORK
c09776c104529678215d4f51a756ea0039a89df4
[ "Apache-2.0" ]
null
null
null
# Water Jug problem
print("Solution for Water Jug problem!")
cap1 = int(input("Enter the capacity of jug1 : "))
cap2 = int(input("Enter the capacity of jug2 : "))
target = int(input("Enter the target volume : "))

def bfs(start, target, cap1, cap2):
    path = []
    front = [start]
    visited = [start]
    while front:
        current = front.pop(0)  # FIFO queue gives true breadth-first order
        x = current[0]          # current volume in jug1
        y = current[1]          # current volume in jug2
        path.append(current)
        if x == target or y == target:
            print("Found!")
            return path
        # All six moves: fill, empty and pour, in both directions
        successors = [
            [cap1, y],                                 # fill jug1
            [x, cap2],                                 # fill jug2
            [0, y],                                    # empty jug1
            [x, 0],                                    # empty jug2
            [max(0, x + y - cap2), min(x + y, cap2)],  # pour jug1 -> jug2
            [min(x + y, cap1), max(0, x + y - cap1)],  # pour jug2 -> jug1
        ]
        for state in successors:
            if state not in visited:
                front.append(state)
                visited.append(state)
    return -1

def gcd(a, b):
    if a == 0:
        return b
    return gcd(b % a, a)

start = [0, 0]
if target % gcd(cap1, cap2) == 0 and target <= max(cap1, cap2):
    print(bfs(start, target, cap1, cap2))
else:
    print("No solution")
25.892308
84
0.512181
# Water Jug problem print("Solution for Water Jug problem!") x = int(input("Enter the capacity of jug1 : ")) y = int(input("Entert the capacity of jug2 : ")) target = int(input("Enter the target volume : ")) def bfs(start, target, x, y): path = [] front = [] front.append(start) visited = [] while(not (not front)): current = front.pop() x = current[0] y = current[1] path.append(current) if(x == target or y == target): print("Found!") return path if(current[0] < x and ([x, current[1]] not in visited)): front.append([x, current[1]]) visited.append([x, current[1]]) if(current[1] < y and ([current[0], y] not in visited)): front.append([current[0], y]) visited.append([current[0], y]) if(current[0] > x and ([0, current[1]] not in visited)): front.append([0, current[1]]) visited.append([0, current[1]]) if(current[1] > y and ([x, 0] not in visited)): front.append([x, 0]) visited.append([x, 0]) if(current[1] > 0 and ([min(x + y, x), max(0, x + y - x)] not in visited)): front.append([min(x + y, x), max(0, x + y - x)]) visited.append([min(x + y, x), max(0, x + y - x)]) if current[0] > 0 and ([max(0, x + y - y), min(x + y, y)] not in visited): front.append([max(0, x + y - y), min(x + y, y)]) visited.append([max(0, x + y - y), min(x + y, y)]) return -1 def gcd(a, b): if a == 0: return b return gcd(b%a, a) start = [0, 0] if target % gcd(x,y) == 0: print(bfs(start, target, x, y)) else: print("No solution")
0
0
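The closing gcd check in Water_jug.py encodes the classic solvability rule for the two-jug puzzle; the full condition also needs the target to fit in one of the jugs. A self-contained sketch (the helper name and sample capacities are illustrative, not from the repository):

from math import gcd

def solvable(x, y, target):
    # reachable in a single jug iff it fits in the larger jug and divides by gcd(x, y)
    return target <= max(x, y) and target % gcd(x, y) == 0

assert solvable(4, 3, 2)       # the classic instance: measure 2 with 4- and 3-unit jugs
assert not solvable(6, 4, 3)   # 3 is not a multiple of gcd(6, 4) = 2
assert not solvable(4, 3, 5)   # a multiple of gcd(4, 3) = 1, but neither jug holds 5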
63865fa932487f27230a6778c7a192a4701cb3dc
6,190
py
Python
ReadStereoCalibration.py
cxn304/Strain-gauges-recognition
1f9f64f8a0fa01892509835694ff88bc47736a7b
[ "Apache-2.0" ]
null
null
null
ReadStereoCalibration.py
cxn304/Strain-gauges-recognition
1f9f64f8a0fa01892509835694ff88bc47736a7b
[ "Apache-2.0" ]
null
null
null
ReadStereoCalibration.py
cxn304/Strain-gauges-recognition
1f9f64f8a0fa01892509835694ff88bc47736a7b
[ "Apache-2.0" ]
null
null
null
'''
author: cxn
version: 0.1.0
read camera calibration from mat
'''
import numpy as np
import cv2
from scipy.io import loadmat
import matplotlib.pyplot as plt

# stereo camera parameters
class stereoCameral(object):
    def __init__(self):
        stereoParameters = loadmat("./internal_reference/stereoParameters.mat")
        self.cam_matrix_left = stereoParameters["stereoParameters"]["K1"][0][0]  # IntrinsicMatrix
        self.distortion_l = stereoParameters["stereoParameters"]["D1"][0][0]  # distortion
        self.cam_matrix_right = stereoParameters["stereoParameters"]["K2"][0][0]
        self.distortion_r = stereoParameters["stereoParameters"]["D2"][0][0]
        self.size = stereoParameters["stereoParameters"]["size"][0][0]  # image size
        self.R = stereoParameters["stereoParameters"]["rot"][0][0].T
        self.T = stereoParameters["stereoParameters"]["trans"][0][0]

def getRectifyTransform(height, width, config):
    # read the matrix parameters
    left_K = config.cam_matrix_left
    right_K = config.cam_matrix_right
    left_distortion = config.distortion_l
    right_distortion = config.distortion_r
    R = config.R
    T = config.T
    # compute the rectification transform with cv2.stereoRectify
    """
    stereoRectify() computes the stereo-rectification maps for each camera.
    It does not rectify the images itself; it returns the transforms needed to do so.
    cameraMatrix1 - camera matrix of the first camera
    distCoeffs1 - distortion coefficients of the first camera
    cameraMatrix2 - camera matrix of the second camera
    distCoeffs2 - distortion coefficients of the second camera
    imageSize - image size
    R - rotation matrix from stereoCalibrate()
    T - translation vector from stereoCalibrate()
    R1 - output rectification transform (rotation) for the first camera
    R2 - output rectification transform (rotation) for the second camera
    P1 - output projection matrix of the first camera in the new coordinates
    P2 - output projection matrix of the second camera in the new coordinates
    Q - 4x4 disparity-to-depth mapping matrix
    flags - zero or CV_CALIB_ZERO_DISPARITY; with CV_CALIB_ZERO_DISPARITY the
        principal points of both rectified images get the same pixel coordinates,
        otherwise the images may be shifted horizontally or vertically to
        maximise the useful area
    alpha - scaling parameter: negative or omitted means no scaling, 0 shows only
        valid pixels (no black borders), 1 keeps the whole image, and values
        between 0 and 1 interpolate between the two
    newImageSize - resolution of the rectified images, defaults to the original
    validPixROI1 - optional output Rect inside which all pixels are valid
    validPixROI2 - optional output Rect inside which all pixels are valid
    """
    if not isinstance(height, int) or not isinstance(width, int):
        height = int(height)
        width = int(width)
    R1, R2, P1, P2, Q, roi1, roi2 = cv2.stereoRectify(
        left_K, left_distortion, right_K, right_distortion,
        (width, height), R, T.T, alpha=0.5)
    """
    initUndistortRectifyMap
    cameraMatrix - camera matrix
    distCoeffs - distortion coefficients
    R - rotation matrix from stereoCalibrate()
    newCameraMatrix - rectified camera matrix (optional)
    Size - resolution of the unrectified image
    m1type - data type of the first output map, CV_32FC1 or CV_16SC2
    map1 - first output map
    map2 - second output map
    """
    map1x, map1y = cv2.initUndistortRectifyMap(
        left_K, left_distortion, R1, P1, (width, height), cv2.CV_32FC1)
    map2x, map2y = cv2.initUndistortRectifyMap(
        right_K, right_distortion, R2, P2, (width, height), cv2.CV_32FC1)
    return map1x, map1y, map2x, map2y, Q

# distortion correction and stereo rectification
def rectifyImage(image1, image2, map1x, map1y, map2x, map2y):
    """
    cv2.remap remaps pixels: it places the pixel at one position of an image
    at a specified position in another image
    """
    rectifyed_img1 = cv2.remap(image1, map1x, map1y, cv2.INTER_AREA)
    rectifyed_img2 = cv2.remap(image2, map2x, map2y, cv2.INTER_AREA)
    return rectifyed_img1, rectifyed_img2

# disparity computation
def sgbm(imgL, imgR):
    # SGBM parameter settings
    blockSize = 8
    img_channels = 3
    stereo = cv2.StereoSGBM_create(minDisparity=1,
                                   numDisparities=64,
                                   blockSize=blockSize,
                                   P1=8 * img_channels * blockSize * blockSize,
                                   P2=32 * img_channels * blockSize * blockSize,
                                   disp12MaxDiff=-1,
                                   preFilterCap=1,
                                   uniquenessRatio=10,
                                   speckleWindowSize=100,
                                   speckleRange=100,
                                   mode=cv2.STEREO_SGBM_MODE_HH)
    # compute the disparity map
    disp = stereo.compute(imgL, imgR)
    disp = np.divide(disp.astype(np.float32), 16.)  # divide by 16 to get the true disparity map
    return disp

# compute 3D coordinates and drop bad points
def threeD(disp, Q):
    # compute the 3D coordinates of each pixel (in the left camera frame)
    points_3d = cv2.reprojectImageTo3D(disp, Q)
    points_3d = points_3d.reshape(points_3d.shape[0] * points_3d.shape[1], 3)
    X = points_3d[:, 0]
    Y = points_3d[:, 1]
    Z = points_3d[:, 2]
    # select and delete the bad points
    remove_idx1 = np.where(Z <= 0)
    remove_idx2 = np.where(Z > 15000)
    remove_idx3 = np.where(X > 10000)
    remove_idx4 = np.where(X < -10000)
    remove_idx5 = np.where(Y > 10000)
    remove_idx6 = np.where(Y < -10000)
    remove_idx = np.hstack(
        (remove_idx1[0], remove_idx2[0], remove_idx3[0],
         remove_idx4[0], remove_idx5[0], remove_idx6[0]))
    points_3d = np.delete(points_3d, remove_idx, 0)
    # pick the target point (here the median of the region; adjust to the use case)
    if points_3d.any():
        x = np.median(points_3d[:, 0])
        y = np.median(points_3d[:, 1])
        z = np.median(points_3d[:, 2])
        targetPoint = [x, y, z]
    else:
        targetPoint = [0, 0, -1]  # target region not recognised
    return targetPoint

# rectification check ---- draw lines
def draw_line(image1, image2):
    # build the output image
    height = max(image1.shape[0], image2.shape[0])
    width = image1.shape[1] + image2.shape[1]
    output = np.zeros((height, width, 3), dtype=np.uint8)
    output[0:image1.shape[0], 0:image1.shape[1]] = image1
    output[0:image2.shape[0], image1.shape[1]:] = image2
    # draw equally spaced parallel lines
    line_interval = 50  # line spacing: 50
    for k in range(height // line_interval):
        cv2.line(output, (0, line_interval * (k + 1)),
                 (2 * width, line_interval * (k + 1)),
                 (0, 255, 0), thickness=2, lineType=cv2.LINE_AA)

imgL = cv2.imread("D:/cxn_project/Strain-gauges-recognition/cali_img/left/l6.bmp")
imgR = cv2.imread("D:/cxn_project/Strain-gauges-recognition/cali_img/right/r6.bmp")
height, width = imgL.shape[0:2]
# read the camera intrinsics and extrinsics
config = stereoCameral()
map1x, map1y, map2x, map2y, Q = getRectifyTransform(height, width, config)
iml_rectified, imr_rectified = rectifyImage(imgL, imgR, map1x, map1y, map2x, map2y)
disp = sgbm(iml_rectified, imr_rectified)
plt.imshow(disp)
target_point = threeD(disp, Q)  # 3D coordinates of the target point (left camera frame)
print(target_point)
32.578947
105
0.641034
''' author: cxn version: 0.1.0 read camera calibration from mat ''' import numpy as np import cv2 from scipy.io import loadmat import matplotlib.pyplot as plt #双目相机参数 class stereoCameral(object): def __init__(self): stereoParameters = loadmat("./internal_reference/stereoParameters.mat") self.cam_matrix_left = stereoParameters["stereoParameters"]["K1"][0][0] # IntrinsicMatrix self.distortion_l = stereoParameters["stereoParameters"]["D1"][0][0] # distortion self.cam_matrix_right = stereoParameters["stereoParameters"]["K2"][0][0] self.distortion_r = stereoParameters["stereoParameters"]["D2"][0][0] self.size = stereoParameters["stereoParameters"]["size"][0][0] # image size self.R = stereoParameters["stereoParameters"]["rot"][0][0].T self.T = stereoParameters["stereoParameters"]["trans"][0][0] def getRectifyTransform(height, width, config): #读取矩阵参数 left_K = config.cam_matrix_left right_K = config.cam_matrix_right left_distortion = config.distortion_l right_distortion = config.distortion_r R = config.R T = config.T #计算校正变换,cv2.stereoRectify """ stereoRectify() 的作用是为每个摄像头计算立体校正的映射矩阵. 所以其运行结果并不是直接将图片进行立体矫正,而是得出进行立体矫正所需要的映射矩阵 cameraMatrix1-第一个摄像机的摄像机矩阵 distCoeffs1-第一个摄像机的畸变向量 cameraMatrix2-第二个摄像机的摄像机矩阵 distCoeffs1-第二个摄像机的畸变向量 imageSize-图像大小 R- stereoCalibrate() 求得的R矩阵 T- stereoCalibrate() 求得的T矩阵 R1-输出矩阵,第一个摄像机的校正变换矩阵(旋转变换) R2-输出矩阵,第二个摄像机的校正变换矩阵(旋转矩阵) P1-输出矩阵,第一个摄像机在新坐标系下的投影矩阵 P2-输出矩阵,第二个摄像机在想坐标系下的投影矩阵 Q-4*4的深度差异映射矩阵 flags-可选的标志有两种零或者CV_CALIB_ZERO_DISPARITY, 如果设置 CV_CALIB_ZERO_DISPARITY 的话,该函数会让两幅校正后的图像的主点 有相同的像素坐标.否则该函数会水平或垂直的移动图像,以使得其有用的范围最大 alpha-拉伸参数.如果设置为负或忽略,将不进行拉伸.如果设置为0,那么校正后图像 只有有效的部分会被显示(没有黑色的部分),如果设置为1,那么就会显示整个图像. 设置为0~1之间的某个值,其效果也居于两者之间. newImageSize-校正后的图像分辨率,默认为原分辨率大小. validPixROI1-可选的输出参数,Rect型数据.其内部的所有像素都有效 validPixROI2-可选的输出参数,Rect型数据.其内部的所有像素都有效 """ if type(height) != "int" or type(width) != "int": height = int(height) width = int(width) R1, R2, P1, P2, Q, roi1, roi2 = cv2.stereoRectify( left_K, left_distortion, right_K, right_distortion, (width, height), R, T.T, alpha=0.5) """ initUndistortRectifyMap cameraMatrix-摄像机参数矩阵 distCoeffs-畸变参数矩阵 R- stereoCalibrate() 求得的R矩阵 newCameraMatrix-矫正后的摄像机矩阵(可省略) Size-没有矫正图像的分辨率 m1type-第一个输出映射的数据类型,可以为 CV_32FC1或CV_16SC2 map1-输出的第一个映射变换 map2-输出的第二个映射变换 """ map1x, map1y = cv2.initUndistortRectifyMap( left_K, left_distortion, R1, P1, (width, height), cv2.CV_32FC1) map2x, map2y = cv2.initUndistortRectifyMap( right_K, right_distortion, R2, P2, (width, height), cv2.CV_32FC1) return map1x, map1y, map2x, map2y, Q # 畸变校正和立体校正 def rectifyImage(image1, image2, map1x, map1y, map2x, map2y): """ cv2.remap重映射,就是把一幅图像中某位置的像素放置到另一个图片指定位置的过程 """ rectifyed_img1 = cv2.remap(image1, map1x, map1y, cv2.INTER_AREA) rectifyed_img2 = cv2.remap(image2, map2x, map2y, cv2.INTER_AREA) return rectifyed_img1, rectifyed_img2 #视差计算 def sgbm(imgL, imgR): #SGBM参数设置 blockSize = 8 img_channels = 3 stereo = cv2.StereoSGBM_create(minDisparity = 1, numDisparities = 64, blockSize = blockSize, P1 = 8 * img_channels * blockSize * blockSize, P2 = 32 * img_channels * blockSize * blockSize, disp12MaxDiff = -1, preFilterCap = 1, uniquenessRatio = 10, speckleWindowSize = 100, speckleRange = 100, mode = cv2.STEREO_SGBM_MODE_HH) # 计算视差图 disp = stereo.compute(imgL, imgR) disp = np.divide(disp.astype(np.float32), 16.) 
# 除以16得到真实视差图 return disp #计算三维坐标,并删除错误点 def threeD(disp, Q): # 计算像素点的3D坐标(左相机坐标系下) points_3d = cv2.reprojectImageTo3D(disp, Q) points_3d = points_3d.reshape(points_3d.shape[0] * points_3d.shape[1], 3) X = points_3d[:, 0] Y = points_3d[:, 1] Z = points_3d[:, 2] #选择并删除错误的点 remove_idx1 = np.where(Z <= 0) remove_idx2 = np.where(Z > 15000) remove_idx3 = np.where(X > 10000) remove_idx4 = np.where(X < -10000) remove_idx5 = np.where(Y > 10000) remove_idx6 = np.where(Y < -10000) remove_idx = np.hstack( (remove_idx1[0], remove_idx2[0], remove_idx3[0], remove_idx4[0], remove_idx5[0], remove_idx6[0])) points_3d = np.delete(points_3d, remove_idx, 0) #计算目标点(这里我选择的是目标区域的中位数,可根据实际情况选取) if points_3d.any(): x = np.median(points_3d[:, 0]) y = np.median(points_3d[:, 1]) z = np.median(points_3d[:, 2]) targetPoint = [x, y, z] else: targetPoint = [0, 0, -1]#无法识别目标区域 return targetPoint # 立体校正检验----画线 def draw_line(image1, image2): # 建立输出图像 height = max(image1.shape[0], image2.shape[0]) width = image1.shape[1] + image2.shape[1] output = np.zeros((height, width, 3), dtype=np.uint8) output[0:image1.shape[0], 0:image1.shape[1]] = image1 output[0:image2.shape[0], image1.shape[1]:] = image2 # 绘制等间距平行线 line_interval = 50 # 直线间隔:50 for k in range(height // line_interval): cv2.line(output, (0, line_interval * (k + 1)), ( 2 * width, line_interval * (k + 1)), (0, 255, 0), thickness=2, lineType=cv2.LINE_AA) imgL = cv2.imread("D:/cxn_project/Strain-gauges-recognition/cali_img/left/l6.bmp") imgR = cv2.imread("D:/cxn_project/Strain-gauges-recognition/cali_img/right/r6.bmp") height, width = imgL.shape[0:2] # 读取相机内参和外参 config = stereoCameral() map1x, map1y, map2x, map2y, Q = getRectifyTransform(height, width, config) iml_rectified, imr_rectified = rectifyImage(imgL, imgR, map1x, map1y, map2x, map2y) disp = sgbm(iml_rectified, imr_rectified) plt.imshow(disp) target_point = threeD(disp, Q) # 计算目标点的3D坐标(左相机坐标系下) print(target_point)
2,151
0
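The Q matrix that getRectifyTransform returns encodes the disparity-to-depth relation of a rectified pair: Z = f * B / d, with focal length f in pixels, baseline B, and disparity d. A back-of-the-envelope sketch with made-up numbers:

# Illustrative values only: a 1200 px focal length and 60 mm baseline.
f_px = 1200.0
baseline_mm = 60.0
disparity_px = 24.0
depth_mm = f_px * baseline_mm / disparity_px
print(depth_mm)  # 3000.0 mm, i.e. the point is about 3 m from the left camera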
9789fa0d9128f43bf8dfdc8bf19f5e86479b11c4
2,110
py
Python
lib/model/model.py
smallstrong0/easy_python
c6794bf290731beb9b3cab94f815880befb37d9b
[ "MIT" ]
2
2020-09-16T09:32:09.000Z
2021-02-10T12:09:40.000Z
lib/model/model.py
smallstrong0/easy_python
c6794bf290731beb9b3cab94f815880befb37d9b
[ "MIT" ]
null
null
null
lib/model/model.py
smallstrong0/easy_python
c6794bf290731beb9b3cab94f815880befb37d9b
[ "MIT" ]
null
null
null
#! /usr/bin/env python
# -*- coding: utf-8 -*-

# Put your models here
from sqlalchemy import Column, BigInteger, Integer, String, SmallInteger, Float, Boolean, DECIMAL, Text, DateTime, Date, \
    Index, UniqueConstraint
from sqlalchemy.dialects.mysql import MEDIUMTEXT, LONGTEXT, BIGINT, INTEGER, SMALLINT, TINYINT, TIMESTAMP
from sqlalchemy.ext.declarative import declarative_base
from decimal import Decimal
from sqlalchemy.schema import Sequence
from lib.model.base import Base, BaseModel

"""
Table conventions
1. New tables must inherit BaseModel
2. The auto-increment primary key must be given a real name; 'id' is not allowed
3. Every column must carry a comment
4. When a column references another table's field, keep the name consistent across tables
5. When naming columns, double-check the spelling whenever the editor flags a word
"""


class ApiLog(BaseModel):
    __tablename__ = "api_log"
    __doc__ = 'API log'
    log_id = Column(BigInteger, primary_key=True, autoincrement=True, comment='log primary key')
    time_consuming = Column(Integer, nullable=False, default=0, comment='time consumed by the call, in milliseconds')
    params = Column(String(1024), nullable=False, default='{}', comment='url parameters')
    body = Column(Text, nullable=False, default='{}', comment='body parameters')
    response = Column(Text, nullable=False, default='{}', comment='response payload')
    date_time_in = Column(String(30), nullable=False, default='', comment='request time')
    date_time_out = Column(String(30), nullable=False, default='', comment='response time')
    method = Column(String(10), nullable=False, default='', comment='http method')
    url = Column(String(1024), nullable=False, default='', comment='http path url')
    user_id = Column(BigInteger, nullable=False, comment='user_id of the logged-in user')
    result = Column(String(10), nullable=False, default='SUCCESS', comment='result')


class Test(BaseModel):
    __tablename__ = 'test'
    test_id = Column(INTEGER(11), primary_key=True, autoincrement=True)
    test_name = Column(String(128), nullable=False, default="")


if __name__ == '__main__':
    from sqlalchemy import create_engine
    from sqlalchemy.orm import sessionmaker, scoped_session
    from setting import MYSQL

    engine = create_engine(MYSQL)
    DBSession = scoped_session(sessionmaker(bind=engine))
    Base.metadata.create_all(engine)
37.678571
122
0.735071
#! /usr/bin/env python # -*- coding: utf-8 -*- # Put your models here from sqlalchemy import Column, BigInteger, Integer, String, SmallInteger, Float, Boolean, DECIMAL, Text, DateTime, Date, \ Index, UniqueConstraint from sqlalchemy.dialects.mysql import MEDIUMTEXT, LONGTEXT, BIGINT, INTEGER, SMALLINT, TINYINT, TIMESTAMP from sqlalchemy.ext.declarative import declarative_base from decimal import Decimal from sqlalchemy.schema import Sequence from lib.model.base import Base, BaseModel """ 建表规范 1.之后建表 请继承BaseModel 2.表字段主键自增强制取名 不允许是id 3.comment备注强制每个字段都要 4.建表之后如果如果关联其他表字段时候 名字别乱取 要统一 5.字段取名 出现下划线警示时候请自行注意单词拼写 """ class ApiLog(BaseModel): __tablename__ = "api_log" __doc__ = '接口log' log_id = Column(BigInteger, primary_key=True, autoincrement=True, comment='日志主键') time_consuming = Column(Integer, nullable=False, default=0, comment='接口耗时 单位毫秒') params = Column(String(1024), nullable=False, default='{}', comment='url参数') body = Column(Text, nullable=False, default='{}', comment='body参数') response = Column(Text, nullable=False, default='{}', comment='返回结果') date_time_in = Column(String(30), nullable=False, default='', comment='调用时间') date_time_out = Column(String(30), nullable=False, default='', comment='返回时间') method = Column(String(10), nullable=False, default='', comment='http method') url = Column(String(1024), nullable=False, default='', comment='http path url') user_id = Column(BigInteger, nullable=False, comment='登录用户的user_id') result = Column(String(10), nullable=False, default='SUCCESS', comment='结果') class Test(BaseModel): __tablename__ = 'test' test_id = Column(INTEGER(11), primary_key=True, autoincrement=True) test_name = Column(String(128), nullable=False, default="") if __name__ == '__main__': from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker, scoped_session from sqlalchemy import create_engine from setting import MYSQL engine = create_engine(MYSQL) DBSession = scoped_session(sessionmaker(bind=engine)) Base.metadata.create_all(engine)
360
0
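The conventions in the model.py docstring are easiest to see in a compliant table definition. A hypothetical sketch (DemoItem and its columns are invented for illustration; it assumes the repository's lib.model.base.BaseModel):

from sqlalchemy import Column, BigInteger, String
from lib.model.base import BaseModel

class DemoItem(BaseModel):
    __tablename__ = 'demo_item'
    # the primary key gets a real name (rule 2) and a comment (rule 3)
    demo_item_id = Column(BigInteger, primary_key=True, autoincrement=True, comment='demo item primary key')
    demo_name = Column(String(64), nullable=False, default='', comment='demo item name')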
7f00b4d7b1812863814e19a971adecf24942fc84
3,949
py
Python
algorithms/Search.py
zhaoxinlu/leetcode-algorithms
f5e1c94c99628e7fb04ba158f686a55a8093e933
[ "MIT" ]
null
null
null
algorithms/Search.py
zhaoxinlu/leetcode-algorithms
f5e1c94c99628e7fb04ba158f686a55a8093e933
[ "MIT" ]
null
null
null
algorithms/Search.py
zhaoxinlu/leetcode-algorithms
f5e1c94c99628e7fb04ba158f686a55a8093e933
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
"""
Author: Zhao Xinlu
School: BUPT
Date: 2018-01-15
Function: Some different searching algorithms and its performance
"""


def Simple_search(lists, key):
    '''
    Simple_search: linear search over unsorted data, visiting the elements one by one.
    Performance: time complexity O(n)
    :param lists: search list
    :param key: the value of key
    :return: the key's location in the list
    '''
    length = len(lists)
    for i in range(0, length):
        if lists[i] == key:
            return i
    return False


def Binary_search(lists, key):
    '''
    Binary search: keep comparing the middle element of the table with the key,
    halving the search range at every step.
    Performance: time complexity O(logn)
    :param lists: search list
    :param key: the value of key
    :return: the key's location in the list
    '''
    length = len(lists)
    low = 0
    high = length - 1
    while low <= high:
        mid = int((low + high) / 2)
        # mid = low + 1/2 * (high - low)
        if lists[mid] > key:
            high = mid - 1
        elif lists[mid] < key:
            low = mid + 1
        else:
            return mid
    return False


def Binary_search2(lists, key, low, high):
    '''
    Binary search 2 (recursive implementation of binary search)
    :param lists: search list
    :param key: the value of key
    :param low:
    :param high:
    :return: the key's location in the list
    '''
    if low > high:
        return False
    mid = int((low + high) / 2)
    if lists[mid] == key:
        return mid
    elif lists[mid] < key:
        return Binary_search2(lists, key, mid + 1, high)
    else:
        return Binary_search2(lists, key, low, mid - 1)


def Binary_search_plus(lists, key):
    '''
    Binary search plus (interpolation search): an optimisation of binary search.
    Halving is not aggressive enough - ideally each probe should rule out most of
    the remaining data, and choosing the probe position is the key problem.
    :param lists: search list
    :param key: the value of key
    :return: the key's location in the list
    '''
    length = len(lists)
    low = 0
    high = length - 1
    while low < high:
        mid = low + int((high - low) * (key - lists[low]) / (lists[high] - lists[low]))
        # core interpolation formula: value = (key - list[low])/(list[high] - list[low])
        if lists[mid] > key:
            high = mid - 1
        elif lists[mid] < key:
            low = mid + 1
        else:
            return mid
    if low == high and lists[low] == key:
        return low
    return False


def Fibonacci_search(lists, key):
    '''
    Fibonacci search: uses the Fibonacci sequence - the golden-ratio split - to
    place mid.
    Performance: time complexity O(logn)
    :param lists: search list
    :param key: the value of search key
    :return: the key's location in the list
    '''
    # Needs a precomputed Fibonacci list whose largest element exceeds the
    # number of elements in the search table.
    FibonacciList = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144,
                     233, 377, 610, 987, 1597, 2584, 4181, 6765,
                     10946, 17711, 28657, 46368]
    length = len(lists)
    low = 0
    high = length - 1

    # To give the search table the Fibonacci property, pad its tail with copies
    # of the last element of the original table.
    # The number of copies to append is F[k]-1-high.
    k = 0
    while high > FibonacciList[k] - 1:
        k += 1
    print k
    i = high
    while FibonacciList[k] - 1 > i:
        lists.append(lists[high])
        i += 1
    print lists

    # main loop of the algorithm
    while low <= high:
        if k < 2:
            mid = low
        else:
            mid = low + FibonacciList[k - 1] - 1
        # use the Fibonacci numbers to pick the position of the next probe
        if key < lists[mid]:
            high = mid - 1
            k -= 1
        elif key > lists[mid]:
            low = mid + 1
            k -= 2
        else:
            if mid <= high:
                return mid
            else:
                return high
    return False


if __name__ == '__main__':
    key = 7
    TestList1 = [3, 6, 5, 9, 7, 1, 8, 2, 4]
    TestList2 = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    TestList3 = [1, 5, 7, 8, 22, 54, 99, 123, 200, 222, 444]
    # result = Simple_search(TestList1, key)
    # result = Binary_search(TestList2, key)
    # result = Binary_search2(TestList2, key, 0, len(TestList2) - 1)
    # result = Binary_search_plus(TestList2, key)
    result = Fibonacci_search(TestList3, key=444)
    print "Key's location of the list is : lists[", result, "]"
26.326667
87
0.545455
# -*- coding: utf-8 -*- """ Author: Zhao Xinlu School: BUPT Date: 2018-01-15 Function: Some different searching algorithms and its performance """ def Simple_search(lists, key): ''' Simple_search: 数据不排序的线性查找,遍历数据元素; 性能: 时间复杂度:O(n) :param lists: search list :param key: the value of key :return: the key's location in the list ''' length = len(lists) for i in range(0, length): if lists[i] == key: return i return False def Binary_search(lists, key): ''' Binary search(二分查找):在查找表中不断取中间元素与查找值进行比较,以二分之一的倍率进行表范围的缩小。 性能: 时间复杂度:O(logn) :param lists: search list :param key: the value of key :return: the key's location in the list ''' length = len(lists) low = 0 high = length - 1 while low < high: mid = int((low + high) / 2) # mid = low + 1/2 * (high - low) if lists[mid] > key: high = mid - 1 elif lists[mid] < key: low = mid + 1 else: return mid return False def Binary_search2(lists, key, low, high): ''' Binary search 2(二分查找的递归实现) :param lists: search list :param key: the value of key :param low: :param high: :return: the key's location in the list ''' mid = int((low + high) / 2) if lists[mid] == key: return mid elif lists[mid] < key: return Binary_search2(lists, key, mid+1, high) else: return Binary_search2(lists, key, low, mid-1) def Binary_search_plus(lists, key): ''' Binary search plus(插值查找):二分查找的优化 对半过滤还不够狠,要是每次都排除十分之九的数据岂不是更好?选择这个值就是关键问题 :param lists: search list :param key: the value of key :return: the key's location in the list ''' length = len(lists) low = 0 high = length - 1 while low < high: mid = low + int((high - low) * (key - lists[low]) / (lists[high] - lists[low])) # 插值的核心公式: value = (key - list[low])/(list[high] - list[low]) if lists[mid] > key: high = mid - 1 elif lists[mid] < key: low = mid + 1 else: return mid return False def Fibonacci_search(lists, key): ''' Fibonacci search(斐波那契查找):利用斐波那契数列的性质,黄金分割的原理来确定mid的位置. 性能: 时间复杂的:O(logn) :param lists: search list :param key: the value of search key :return: the key's location in the list ''' # 需要一个现成的斐波那契列表, 其最大元素的值必须超过查找表中元素个数的数值。 FibonacciList = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584, 4181, 6765, 10946, 17711, 28657, 46368] length = len(lists) low = 0 high = length - 1 # 为了使得查找表满足斐波那契特性,在表的最后添加几个同样的值 # 这个值是原查找表的最后那个元素的值 # 添加的个数由F[k]-1-high决定 k = 0 while high > FibonacciList[k] - 1: k += 1 print k i = high while FibonacciList[k] - 1 > i: lists.append(lists[high]) i += 1 print lists # 算法主逻辑 while low <= high: if k < 2: mid = low else: mid = low + FibonacciList[k] - 1 # 利用斐波那契数列来找寻下一个要比较的关键字的位置 if key < lists[mid]: high = mid - 1 k -= 1 elif key > lists[mid]: low = mid + 1 k -= 2 else: if mid <= high: return mid else: return high return False if __name__ == '__main__': key = 7 TestList1 = [3, 6, 5, 9, 7, 1, 8, 2, 4] TestList2 = [1, 2, 3, 4, 5, 6, 7, 8, 9] TestList3 = [1, 5, 7, 8, 22, 54, 99, 123, 200, 222, 444] # result = Simple_search(TestList1, key) # result = Binary_search(TestList2, key) # result = Binary_search2(TestList2, key, 0, len(TestList2)) # result = Binary_search_plus(TestList2, key) result = Fibonacci_search(TestList3, key=444) print "Key's location of the list is : lists[", result, "]"
912
0
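The interpolation probe in Binary_search_plus rewards a worked instance: on uniformly spaced data it lands on the key in a single step. A sketch with an illustrative list:

lists = [10, 20, 30, 40, 50, 60, 70, 80, 90]
key, low, high = 70, 0, len(lists) - 1
# the same probe formula as in Binary_search_plus, kept in integer arithmetic
mid = low + (high - low) * (key - lists[low]) // (lists[high] - lists[low])
assert mid == 6 and lists[mid] == key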
d04e9753e38d729231b2e66b82bf461bc97df527
3,220
py
Python
goldman/resources/oauth_ropc.py
sassoo/goldman
b72540c9ad06b5c68aadb1b4fa8cb0b716260bf2
[ "MIT" ]
2
2016-07-26T13:47:51.000Z
2017-02-13T12:08:38.000Z
goldman/resources/oauth_ropc.py
sassoo/goldman
b72540c9ad06b5c68aadb1b4fa8cb0b716260bf2
[ "MIT" ]
null
null
null
goldman/resources/oauth_ropc.py
sassoo/goldman
b72540c9ad06b5c68aadb1b4fa8cb0b716260bf2
[ "MIT" ]
null
null
null
""" resources.oauth_ropc ~~~~~~~~~~~~~~~~~~~~ OAuth2 Resource Owner Password Credentials Grant resource object with responders. This resource should be used to accept access_token requests according to RFC 6749 section 4.3: tools.ietf.org/html/rfc6749#section-4.3 The resource requires a callable to be passed in as the auth_creds property which will be given a username & password. The callable should return a token. Returning a string will be interpreted as an error & a RFC 6749 compliant error response will be sent with the error message as the error_description field in the response. """ import falcon import goldman from goldman.exceptions import AuthRejected from ..resources.base import Resource as BaseResource class Resource(BaseResource): """ OAuth2 Resource Owner Password Credentials Grant resource """ DESERIALIZERS = [ goldman.FormUrlEncodedDeserializer, ] SERIALIZERS = [ goldman.JsonSerializer, ] def __init__(self, auth_creds): self.auth_creds = auth_creds super(Resource, self).__init__() @property def _realm(self): """ Return a string representation of the authentication realm """ return 'Bearer realm="%s"' % goldman.config.AUTH_REALM def on_post(self, req, resp): """ Validate the access token request for spec compliance The spec also dictates the JSON based error response on failure & is handled in this responder. """ grant_type = req.get_param('grant_type') password = req.get_param('password') username = req.get_param('username') # errors or not, disable client caching along the way # per the spec resp.disable_caching() if not grant_type or not password or not username: resp.status = falcon.HTTP_400 resp.serialize({ 'error': 'invalid_request', 'error_description': 'A grant_type, username, & password ' 'parameters are all required when ' 'requesting an OAuth access_token', 'error_uri': 'tools.ietf.org/html/rfc6749#section-4.3.2', }) elif grant_type != 'password': resp.status = falcon.HTTP_400 resp.serialize({ 'error': 'unsupported_grant_type', 'error_description': 'The grant_type parameter MUST be set ' 'to "password" not "%s"' % grant_type, 'error_uri': 'tools.ietf.org/html/rfc6749#section-4.3.2', }) else: try: token = self.auth_creds(username, password) resp.serialize({ 'access_token': token, 'token_type': 'Bearer', }) except AuthRejected as exc: resp.status = falcon.HTTP_401 resp.set_header('WWW-Authenticate', self._realm) resp.serialize({ 'error': 'invalid_client', 'error_description': exc.detail, })
32.857143
76
0.587888
""" resources.oauth_ropc ~~~~~~~~~~~~~~~~~~~~ OAuth2 Resource Owner Password Credentials Grant resource object with responders. This resource should be used to accept access_token requests according to RFC 6749 section 4.3: tools.ietf.org/html/rfc6749#section-4.3 The resource requires a callable to be passed in as the auth_creds property which will be given a username & password. The callable should return a token. Returning a string will be interpreted as an error & a RFC 6749 compliant error response will be sent with the error message as the error_description field in the response. """ import falcon import goldman from goldman.exceptions import AuthRejected from ..resources.base import Resource as BaseResource class Resource(BaseResource): """ OAuth2 Resource Owner Password Credentials Grant resource """ DESERIALIZERS = [ goldman.FormUrlEncodedDeserializer, ] SERIALIZERS = [ goldman.JsonSerializer, ] def __init__(self, auth_creds): self.auth_creds = auth_creds super(Resource, self).__init__() @property def _realm(self): """ Return a string representation of the authentication realm """ return 'Bearer realm="%s"' % goldman.config.AUTH_REALM def on_post(self, req, resp): """ Validate the access token request for spec compliance The spec also dictates the JSON based error response on failure & is handled in this responder. """ grant_type = req.get_param('grant_type') password = req.get_param('password') username = req.get_param('username') # errors or not, disable client caching along the way # per the spec resp.disable_caching() if not grant_type or not password or not username: resp.status = falcon.HTTP_400 resp.serialize({ 'error': 'invalid_request', 'error_description': 'A grant_type, username, & password ' 'parameters are all required when ' 'requesting an OAuth access_token', 'error_uri': 'tools.ietf.org/html/rfc6749#section-4.3.2', }) elif grant_type != 'password': resp.status = falcon.HTTP_400 resp.serialize({ 'error': 'unsupported_grant_type', 'error_description': 'The grant_type parameter MUST be set ' 'to "password" not "%s"' % grant_type, 'error_uri': 'tools.ietf.org/html/rfc6749#section-4.3.2', }) else: try: token = self.auth_creds(username, password) resp.serialize({ 'access_token': token, 'token_type': 'Bearer', }) except AuthRejected as exc: resp.status = falcon.HTTP_401 resp.set_header('WWW-Authenticate', self._realm) resp.serialize({ 'error': 'invalid_client', 'error_description': exc.detail, })
0
0
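Per RFC 6749 section 4.3, on_post above expects a form-encoded POST carrying grant_type=password. A hedged client-side sketch (the endpoint URL and credentials are placeholders; the actual mount point depends on the application):

import requests

resp = requests.post(
    'https://api.example.com/oauth/token',  # hypothetical mount point for this resource
    data={'grant_type': 'password', 'username': 'alice', 'password': 's3cret'},
)
# on success the resource serializes {"access_token": "...", "token_type": "Bearer"}
print(resp.status_code, resp.json())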
6e976b5b835b38818090ff9810971809f857fe0e
7,534
py
Python
newapp.py
Andriusjok/VUSAMIFChatBot
9444bcf4f0f2137757925f8e07b8cc3ba442a162
[ "BSD-2-Clause" ]
null
null
null
newapp.py
Andriusjok/VUSAMIFChatBot
9444bcf4f0f2137757925f8e07b8cc3ba442a162
[ "BSD-2-Clause" ]
null
null
null
newapp.py
Andriusjok/VUSAMIFChatBot
9444bcf4f0f2137757925f8e07b8cc3ba442a162
[ "BSD-2-Clause" ]
null
null
null
import os
from flask import Flask, request
from fbmessenger import BaseMessenger
from fbmessenger import quick_replies
from fbmessenger.elements import Text
from fbmessenger.thread_settings import GreetingText, GetStartedButton, MessengerProfile
from fbmessenger import elements
from fbmessenger import templates

ACCESS_TOKEN = "Terribly secret"
VERIFY_TOKEN = "Even more secret"


class Messenger(BaseMessenger):
    def __init__(self, page_access_token):
        self.page_access_token = page_access_token
        super(Messenger, self).__init__(self.page_access_token)

    def message(self, message):
        response = Text(text=str(message["message"]["text"]))
        action = response.to_dict()
        res = self.send(action)
        app.logger.debug("Response: {}".format(res))

    def delivery(self, message):
        pass

    def read(self, message):
        pass

    def account_linking(self, message):
        pass

    def postback(self, message):
        payload = message["postback"]["payload"]
        print(message["postback"]["payload"])
        if "start" in payload:
            elem = elements.Text("Hello! To use the VU SA MIF FAQ (DUK) section, pick one of the topics below; otherwise, just ask your question.")
            self.send(elem.to_dict(), "RESPONSE")
            btn1 = elements.Button(button_type="postback", title="VU SA+LSP+Apeliacijos", payload="VU SA+LSP+Apeliacijos")
            btn2 = elements.Button(button_type="postback", title="BUS+PD", payload="BUS+PD")
            btn3 = elements.Button(button_type="postback", title="Studijos+Finansai", payload="Studijos+Finansai")
            btns = templates.ButtonTemplate(
                text="FAQ topics",
                buttons=[btn1, btn2, btn3]
            )
            self.send(btns.to_dict(), "RESPONSE")
        if "VU SA+LSP+Apeliacijos" == payload:
            btn1 = elements.Button(button_type="postback", title="VU SA", payload="VU SA")
            btn2 = elements.Button(button_type="postback", title="LSP", payload="LSP")
            btn3 = elements.Button(button_type="postback", title="Apeliacijos", payload="Apeliacijos")
            btns = templates.ButtonTemplate(
                text="Subtopics",
                buttons=[btn1, btn2, btn3]
            )
            self.send(btns.to_dict(), "RESPONSE")
        if "BUS+PD" == payload:
            btn1 = elements.Button(button_type="postback", title="BUS", payload="BUS")
            btn2 = elements.Button(button_type="postback", title="PD", payload="PD")
            btns = templates.ButtonTemplate(
                text="Subtopics",
                buttons=[btn1, btn2]
            )
            self.send(btns.to_dict(), "RESPONSE")
        if "Studijos+Finansai" == payload:
            btn1 = elements.Button(button_type="postback", title="Studijos", payload="Studijos")
            btn2 = elements.Button(button_type="postback", title="Finansai", payload="Finansai")
            btns = templates.ButtonTemplate(
                text="Subtopics",
                buttons=[btn1, btn2]
            )
            self.send(btns.to_dict(), "RESPONSE")
        if "Studijos" == payload:
            btn1 = elements.Button(
                button_type="web_url",
                title="Answers",
                url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit"
            )
            btns = templates.ButtonTemplate(
                text="Link to the FAQ about changing study programmes / courses and about minor studies / individual study plans",
                buttons=[btn1]
            )
            print(self.send(btns.to_dict(), "RESPONSE"))
        if "Finansai" == payload:
            btn1 = elements.Button(
                button_type="web_url",
                title="Answers",
                url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit"
            )
            btns = templates.ButtonTemplate(
                text="Link to the FAQ about tuition fees and scholarships",
                buttons=[btn1]
            )
            print(self.send(btns.to_dict(), "RESPONSE"))
        if "BUS" == payload:
            btn1 = elements.Button(
                button_type="web_url",
                title="Answers",
                url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit"
            )
            btns = templates.ButtonTemplate(
                text="Link to the FAQ about general university studies",
                buttons=[btn1]
            )
            print(self.send(btns.to_dict(), "RESPONSE"))
        if "PD" == payload:
            btn1 = elements.Button(
                button_type="web_url",
                title="Answers",
                url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit"
            )
            btns = templates.ButtonTemplate(
                text="Link to the FAQ about elective courses",
                buttons=[btn1]
            )
            print(self.send(btns.to_dict(), "RESPONSE"))
        if "VU SA" == payload:
            btn1 = elements.Button(
                button_type="web_url",
                title="Answers",
                url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit"
            )
            btns = templates.ButtonTemplate(
                text="Link to the FAQ about VU SA and VU SA MIF",
                buttons=[btn1]
            )
            print(self.send(btns.to_dict(), "RESPONSE"))
        if "LSP" == payload:
            btn1 = elements.Button(
                button_type="web_url",
                title="Answers",
                url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit"
            )
            btns = templates.ButtonTemplate(
                text="Link to the FAQ about the LSP",
                buttons=[btn1]
            )
            print(self.send(btns.to_dict(), "RESPONSE"))
        if "Apeliacijos" == payload:
            btn1 = elements.Button(
                button_type="web_url",
                title="Answers",
                url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit"
            )
            btns = templates.ButtonTemplate(
                text="Link to the FAQ about appeals and complaints",
                buttons=[btn1]
            )
            print(self.send(btns.to_dict(), "RESPONSE"))

    def optin(self, message):
        pass

    def init_bot(self):
        greeting_text = GreetingText("VU SA MIF consulting")
        messenger_profile = MessengerProfile(greetings=[greeting_text])
        messenger.set_messenger_profile(messenger_profile.to_dict())
        get_started = GetStartedButton(payload="start")
        messenger_profile = MessengerProfile(get_started=get_started)
        messenger.set_messenger_profile(messenger_profile.to_dict())


app = Flask(__name__)
app.debug = True
messenger = Messenger(ACCESS_TOKEN)


@app.route("/", methods=["GET", "POST"])
def webhook():
    if request.method == "GET":
        if request.args.get("hub.verify_token") == VERIFY_TOKEN:
            messenger.init_bot()
            return request.args.get("hub.challenge")
        raise ValueError("FB_VERIFY_TOKEN does not match.")
    elif request.method == "POST":
        messenger.handle(request.get_json(force=True))
    return ""


if __name__ == "__main__":
    app.run(host="0.0.0.0")
41.395604
168
0.591585
import os from flask import Flask, request from fbmessenger import BaseMessenger from fbmessenger import quick_replies from fbmessenger.elements import Text from fbmessenger.thread_settings import GreetingText, GetStartedButton, MessengerProfile from fbmessenger import elements from fbmessenger import templates ACCESS_TOKEN = "Baisiai slaptas" VERIFY_TOKEN = "Dar slaptesnis" class Messenger(BaseMessenger): def __init__(self, page_access_token): self.page_access_token = page_access_token super(Messenger, self).__init__(self.page_access_token) def message(self, message): response = Text(text= str(message["message"]["text"])) action = response.to_dict() res = self.send(action) app.logger.debug("Response: {}".format(res)) def delivery(self, message): pass def read(self, message): pass def account_linking(self, message): pass def postback(self, message): payload = message["postback"]["payload"] print(message["postback"]["payload"]) if "start" in payload: elem = elements.Text("Sveiki, norėdami pasinaudoti VU SA MIF DUK skiltimi, pasirinkite vieną iš žemiau pateiktų temų, kitu atveju užduokite savo klausimą.") self.send(elem.to_dict(),"RESPONSE") btn1 = elements.Button(button_type = "postback", title="VU SA+LSP+Apeliacijos", payload="VU SA+LSP+Apeliacijos") btn2 = elements.Button(button_type = "postback", title="BUS+PD", payload="BUS+PD") btn3 = elements.Button(button_type = "postback", title="Studijos+Finansai", payload="Studijos+Finansai") btns = templates.ButtonTemplate( text = "DUK temos", buttons = [btn1, btn2, btn3] ) self.send(btns.to_dict(),"RESPONSE") if "VU SA+LSP+Apeliacijos" == payload: btn1 = elements.Button(button_type = "postback", title="VU SA", payload="VU SA") btn2 = elements.Button(button_type = "postback", title="LSP", payload="LSP") btn3 = elements.Button(button_type = "postback", title="Apeliacijos", payload="Apeliacijos") btns = templates.ButtonTemplate( text = "Potemės", buttons = [btn1, btn2, btn3] ) self.send(btns.to_dict(),"RESPONSE") if "BUS+PD" == payload: btn1 = elements.Button(button_type = "postback", title="BUS", payload="BUS") btn2 = elements.Button(button_type = "postback", title="PD", payload="PD") btns = templates.ButtonTemplate( text = "Potemės", buttons = [btn1, btn2] ) self.send(btns.to_dict(),"RESPONSE") if "Studijos+Finansai" == payload: btn1 = elements.Button(button_type = "postback", title="Studijos", payload="Studijos") btn2 = elements.Button(button_type = "postback", title="Finansai", payload="Finansai") btns = templates.ButtonTemplate( text = "Potemės", buttons = [btn1, btn2] ) self.send(btns.to_dict(),"RESPONSE") if "Studijos" == payload: btn1 = elements.Button( button_type = "web_url", title="Atsakymai", url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit" ) btns = templates.ButtonTemplate( text = "Nuoroda į DUK apie Studijų programų / dalykų keitimą bei gretutines studijas / individualų studijų planą", buttons = [btn1] ) print(self.send(btns.to_dict(),"RESPONSE")) if "Finansai" == payload: btn1 = elements.Button( button_type = "web_url", title="Atsakymai", url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit" ) btns = templates.ButtonTemplate( text = "Nuoroda į DUK apie mokesčius už mokslą bei stipendijas", buttons = [btn1] ) print(self.send(btns.to_dict(),"RESPONSE")) if "BUS" == payload: btn1 = elements.Button( button_type = "web_url", title="Atsakymai", url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit" ) btns = 
templates.ButtonTemplate( text = "Nuoroda į DUK apie Bendrasias universitetines studijas", buttons = [btn1] ) print(self.send(btns.to_dict(),"RESPONSE")) if "PD" == payload: btn1 = elements.Button( button_type = "web_url", title="Atsakymai", url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit" ) btns = templates.ButtonTemplate( text = "Nuoroda į DUK apie Pasirenkamuosius dalykus", buttons = [btn1] ) print(self.send(btns.to_dict(),"RESPONSE")) if "VU SA" == payload: btn1 = elements.Button( button_type = "web_url", title="Atsakymai", url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit" ) btns = templates.ButtonTemplate( text = "Nuoroda į DUK apie VU SA bei VU SA MIF", buttons = [btn1] ) print(self.send(btns.to_dict(),"RESPONSE")) if "LSP" == payload: btn1 = elements.Button( button_type = "web_url", title="Atsakymai", url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit" ) btns = templates.ButtonTemplate( text = "Nuoroda į DUK apie LSP", buttons = [btn1] ) print(self.send(btns.to_dict(),"RESPONSE")) if "Apeliacijos" == payload: btn1 = elements.Button( button_type = "web_url", title="Atsakymai", url="https://docs.google.com/document/d/1e_1jSsdjlfoIYJrIuCZELJX0nv4F5IIp2ar-CMUmn98/edit" ) btns = templates.ButtonTemplate( text = "Nuoroda į DUK apie Apeliacijas bei skundus", buttons = [btn1] ) print(self.send(btns.to_dict(),"RESPONSE")) def optin(self, message): pass def init_bot(self): greeting_text = GreetingText("VU SA MIF konsultavimas") messenger_profile = MessengerProfile(greetings=[greeting_text]) messenger.set_messenger_profile(messenger_profile.to_dict()) get_started = GetStartedButton(payload="start") messenger_profile = MessengerProfile(get_started=get_started) messenger.set_messenger_profile(messenger_profile.to_dict()) app = Flask(__name__) app.debug = True messenger = Messenger(ACCESS_TOKEN) @app.route("/", methods=["GET", "POST"]) def webhook(): if request.method == "GET": if request.args.get("hub.verify_token") == VERIFY_TOKEN: messenger.init_bot() return request.args.get("hub.challenge") raise ValueError("FB_VERIFY_TOKEN does not match.") elif request.method == "POST": messenger.handle(request.get_json(force=True)) return "" if __name__ == "__main__": app.run(host="0.0.0.0")
56
0
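Each branch of postback() builds its menu from the same two fbmessenger calls. A minimal sketch of that pattern (title, payload, and text values here are illustrative):

from fbmessenger import elements, templates

btn = elements.Button(button_type='postback', title='FAQ', payload='FAQ')
menu = templates.ButtonTemplate(text='Topics', buttons=[btn])
payload = menu.to_dict()  # ready to pass to Messenger.send(..., 'RESPONSE')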
788f66b9fb4748228011a407e6e029dba64a944b
14,173
py
Python
fixture/contact.py
Droriel/python_training
e0fbbf3df4289e5af606d9c752e99cab82c653a6
[ "Apache-2.0" ]
null
null
null
fixture/contact.py
Droriel/python_training
e0fbbf3df4289e5af606d9c752e99cab82c653a6
[ "Apache-2.0" ]
null
null
null
fixture/contact.py
Droriel/python_training
e0fbbf3df4289e5af606d9c752e99cab82c653a6
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from model.contact import ContactBaseData
import re


class ContactHelper:

    def __init__(self, app):
        self.app = app

    # additional methods - adding contact
    def open_main_page(self):
        wd = self.app.wd
        if not (wd.current_url.endswith('/addressbook/')
                and len(wd.find_elements_by_xpath("//strong[contains(.,'Liczba trafień:')]")) > 0):
            wd.find_element_by_xpath("//a[contains(.,'strona główna')]").click()

    def submit_contact(self):
        wd = self.app.wd
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()

    def fill_notes(self, notes):
        wd = self.app.wd
        self.app.change_field_value("notes", notes.notes)

    def fill_additional_data(self, additionalData):
        wd = self.app.wd
        # Fill second address and phone
        # Fill in second address
        self.app.change_field_value("address2", additionalData.address)
        # Fill in "Prywatny" phone
        self.app.change_field_value("phone2", additionalData.phone)

    def fill_anniversary_date(self, anniversaryDate):
        wd = self.app.wd
        # Choose in day
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[3]//option[%s]" % str(anniversaryDate.day + 2)).is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[%s]"
                                     % str(anniversaryDate.day + 2)).click()
        # Choose in month
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[4]//option[%s]" % str(anniversaryDate.month + 1)).is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[%s]"
                                     % str(anniversaryDate.month + 1)).click()
        # Fill in year
        self.app.change_field_value("ayear", anniversaryDate.year)

    def update_anniversary_date(self, anniversaryDate):
        wd = self.app.wd
        # Choose in day
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[3]//option[%s]" % str(anniversaryDate.day + 2)).is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[3]//option[%s]"
                                     % str(anniversaryDate.day + 2)).click()
        # Choose in month
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[4]//option[%s]" % str(anniversaryDate.month + 2)).is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[4]//option[%s]"
                                     % str(anniversaryDate.month + 2)).click()
        # Fill in year
        self.app.change_field_value("ayear", anniversaryDate.year)

    def fill_birth_date(self, birthDate):
        wd = self.app.wd
        # Choose in day
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[1]//option[%s]" % str(birthDate.day + 2)).is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[%s]"
                                     % str(birthDate.day + 2)).click()
        # Choose in month
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[2]//option[%s]" % str(birthDate.month + 1)).is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[%s]"
                                     % str(birthDate.month + 1)).click()
        # Fill in year
        self.app.change_field_value("byear", birthDate.year)

    def update_birth_date(self, birthDate):
        wd = self.app.wd
        # Choose in day
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[1]//option[%s]" % str(birthDate.day + 2)).is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[%s]"
                                     % str(birthDate.day + 2)).click()
        # Choose in month
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[2]//option[%s]" % str(birthDate.month + 2)).is_selected():
            wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[%s]"
                                     % str(birthDate.month + 2)).click()
        # Fill in year
        self.app.change_field_value("byear", birthDate.year)

    def fill_www_address(self, www):
        wd = self.app.wd
        self.app.change_field_value("homepage", www.www)

    def fill_emails(self, emails):
        wd = self.app.wd
        self.app.change_field_value("email", emails.email1)
        self.app.change_field_value("email2", emails.email2)
        self.app.change_field_value("email3", emails.email3)

    def fill_phone_number(self, phoneNumbers):
        wd = self.app.wd
        # Fill in home number
        self.app.change_field_value("home", phoneNumbers.home)
        self.app.change_field_value("mobile", phoneNumbers.mobile)
        self.app.change_field_value("work", phoneNumbers.work)
        self.app.change_field_value("fax", phoneNumbers.fax)

    def fill_contact_base_data(self, baseData):
        wd = self.app.wd
        self.app.change_field_value("firstname", baseData.firstname)
        self.app.change_field_value("lastname", baseData.lastname)
        # self.app.change_field_value("home", phoneNumbers.home)
        # self.app.change_field_value("mobile", phoneNumbers.mobile)
        # self.app.change_field_value("work", phoneNumbers.work)
        # self.app.change_field_value("phone2", additionalData.phone)
        # self.app.change_field_value("email", emails.email1)
        # self.app.change_field_value("email2", emails.email2)
        # self.app.change_field_value("email3", emails.email3)

    def fill_personal_data(self, personalData):
        wd = self.app.wd
        self.app.change_field_value("middlename", personalData.middlename)
        self.app.change_field_value("nickname", personalData.nickname)
        # Add photo
        # wd.find_element_by_name("photo").click()
        self.app.change_field_value("title", personalData.title)
        self.app.change_field_value("company", personalData.company)
        self.app.change_field_value("address", personalData.address)

    def init_new_contact(self):
        wd = self.app.wd
        wd.find_element_by_link_text("nowy wpis").click()

    def choose_by_id_contact(self, contact_id):
        wd = self.app.wd
        wd.find_element_by_xpath("//input[@id='%s']" % contact_id).click()

    def delete_first_contact(self):
        wd = self.app.wd
        self.delete_contact_by_index(0)

    def delete_contact_by_index(self, index):
        wd = self.app.wd
        self.open_main_page()
        # Choose first contact
        wd.find_elements_by_name("selected[]")[index].click()
        # Submit contact deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        # closing alert window
        wd.switch_to_alert().accept()
        self.contact_cache = None

    def delete_contact_by_id(self, contact_id):
        wd = self.app.wd
        self.open_main_page()
        self.choose_by_id_contact(contact_id)
        # Submit contact deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        # closing alert window
        wd.switch_to_alert().accept()
        self.contact_cache = None

    def delete_all_contacts(self):
        wd = self.app.wd
        self.open_main_page()
        # Choose all contacts
        # //form[@name='MainForm']/input[2]
        wd.find_element_by_xpath("//input[@id='MassCB']").click()
        # Submit contact deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        # closing alert window
        wd.switch_to_alert().accept()
        self.contact_cache = None

    def init_first_contact_edition(self):
        wd = self.app.wd
        self.init_by_index_contact_edition(0)

    def init_by_index_contact_edition(self, index):
        wd = self.app.wd
        self.open_main_page()
        wd.find_elements_by_xpath("//img[@title='Edytuj']")[index].click()

    def init_by_id_contact_edition(self, contact_id):
        wd = self.app.wd
        self.open_main_page()
        wd.find_element_by_xpath("//a[contains(@href,'edit.php?id=%s')]/img" % contact_id).click()

    def open_contact_view_by_index(self, index):
        wd = self.app.wd
        self.open_main_page()
        wd.find_elements_by_xpath("//img[@alt='Szczegóły']")[index].click()

    def update_contact_top(self):
        wd = self.app.wd
        wd.find_element_by_xpath("//input[@value='Aktualizuj'][1]").click()
        self.contact_cache = None

    def update_contact_bottom(self):
        wd = self.app.wd
        wd.find_element_by_xpath("//input[@value='Aktualizuj'][2]").click()
        self.contact_cache = None

    def delete_edited_contact(self):
        wd = self.app.wd
        wd.find_element_by_xpath("//input[@value='Usuń']").click()
        self.contact_cache = None

    def add_contact_to_group(self, contact_id, group_id):
        wd = self.app.wd
        self.open_main_page()
        # choosing contact
        self.choose_by_id_contact(contact_id)
        # choosing group from dropdown for adding
        # wd.find_element_by_xpath("//select[@name='to_group']").click()
        wd.find_element_by_xpath("//select[@name='to_group']/option[@value='%s']" % group_id).click()
        # Submit
        wd.find_element_by_xpath("//input[@name='add']").click()

    def delete_contact_from_group(self, contact_id, group_id):
        wd = self.app.wd
        self.open_main_page()
        # group choosing from dropdown for viewing contacts in group
        wd.find_element_by_xpath("//select[@name='group']/option[@value='%s']" % group_id).click()
        # waiting for the refresh of content
        wait = WebDriverWait(wd, 10)
        wait.until(lambda d: d.find_element_by_xpath("//input[@name='remove']"))
        # choosing contact
        self.choose_by_id_contact(contact_id)
        # Submit
        wd.find_element_by_xpath("//input[@name='remove']").click()

    # counting elements on the list
    def count(self):
        wd = self.app.wd
        self.open_main_page()
        return len(wd.find_elements_by_name("selected[]"))

    contact_cache = None

    def get_contact_list(self):
        wd = self.app.wd
        self.open_main_page()
        self.contact_cache = []
        for row in wd.find_elements_by_name('entry'):
            cells = row.find_elements_by_tag_name('td')
            id = cells[0].find_element_by_tag_name('input').get_attribute('value')
            # a '.' before // makes the xpath relative - as if a new DOM were created within the row
            # text1 = element.find_element_by_xpath(".//td[2]").text
            # text2 = element.find_element_by_xpath(".//td[3]").text
            # lastName = row.find_element_by_css_selector('*>td:nth-of-type(2)').text
            # firstName = row.find_element_by_css_selector('*>td:nth-of-type(3)').text
            firstName = cells[2].text
            lastName = cells[1].text
            allPhones = cells[5].text
            allEmails = cells[4].text
            address = cells[3].text
            self.contact_cache.append(ContactBaseData(firstname=firstName, lastname=lastName, id=id, address=address,
                                                      allPhonesFromHomePage=allPhones,
                                                      allEmailsFromHomePage=allEmails))
        return list(self.contact_cache)

    def get_contact_info_from_edit_page(self, index):
        wd = self.app.wd
        self.init_by_index_contact_edition(index)
        id = wd.find_element_by_name('id').get_attribute('value')
        firstname = wd.find_element_by_name('firstname').get_attribute('value')
        lastname = wd.find_element_by_name('lastname').get_attribute('value')
        address = wd.find_element_by_name('address').get_attribute('value')
        homephone = wd.find_element_by_name('home').get_attribute('value')
        workphone = wd.find_element_by_name('work').get_attribute('value')
        mobilephone = wd.find_element_by_name('mobile').get_attribute('value')
        additionalphone = wd.find_element_by_name('phone2').get_attribute('value')
        email1 = wd.find_element_by_name('email').get_attribute('value')
        email2 = wd.find_element_by_name('email2').get_attribute('value')
        email3 = wd.find_element_by_name('email3').get_attribute('value')
        return ContactBaseData(firstname=firstname, lastname=lastname, id=id,
                               homephone=homephone, workphone=workphone, mobilephone=mobilephone,
                               additionalphone=additionalphone,
                               email1=email1, email2=email2, email3=email3, address=address)

    def get_contact_info_from_view_page(self, index):
        wd = self.app.wd
        self.open_contact_view_by_index(index)
        text = wd.find_element_by_id('content').text
        if re.search(r'H:\s(.*)', text) is not None:
            homephone = re.search(r'H:\s(.*)', text).group(1)
        else:
            homephone = None
        if re.search(r'W:\s(.*)', text) is not None:
            workphone = re.search(r'W:\s(.*)', text).group(1)
        else:
            workphone = None
        if re.search(r'M:\s(.*)', text) is not None:
            mobilephone = re.search(r'M:\s(.*)', text).group(1)
        else:
            mobilephone = None
        if re.search(r'P:\s(.*)', text) is not None:
            additionalphone = re.search(r'P:\s(.*)', text).group(1)
        else:
            additionalphone = None
        # allEmails = wd.find_elements_by_xpath("//a[starts-with(@href, 'mailto:')]")
        allEmails = []
        for i in range(0, len(wd.find_elements_by_xpath("//a[starts-with(@href, 'mailto:')]"))):
            allEmails.append(wd.find_elements_by_xpath("//a[starts-with(@href, 'mailto:')]")[i].text)
        return ContactBaseData(homephone=homephone, workphone=workphone, mobilephone=mobilephone,
                               additionalphone=additionalphone, allEmailsFromHomePage=allEmails)
46.621711
146
0.627672
# -*- coding: utf-8 -*-
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from model.contact import ContactBaseData
import re


class ContactHelper:

    def __init__(self, app):
        self.app = app

    # additional methods - adding a contact
    def open_main_page(self):
        wd = self.app.wd
        if not (wd.current_url.endswith('/addressbook/')
                and len(wd.find_elements_by_xpath("//strong[contains(.,'Liczba trafień:')]")) > 0):
            wd.find_element_by_xpath("//a[contains(.,'strona główna')]").click()

    def submit_contact(self):
        wd = self.app.wd
        wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()

    def fill_notes(self, notes):
        wd = self.app.wd
        self.app.change_field_value("notes", notes.notes)

    def fill_additional_data(self, additionalData):
        wd = self.app.wd
        # Fill in second address and phone
        # Fill in second address
        self.app.change_field_value("address2", additionalData.address)
        # Fill in "Prywatny" (private) phone
        self.app.change_field_value("phone2", additionalData.phone)

    def fill_anniversary_date(self, anniversaryDate):
        wd = self.app.wd
        # Choose day
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[3]//option[%s]" % str(anniversaryDate.day + 2)).is_selected():
            wd.find_element_by_xpath(
                "//div[@id='content']/form/select[3]//option[%s]" % str(anniversaryDate.day + 2)).click()
        # Choose month
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[4]//option[%s]" % str(anniversaryDate.month + 1)).is_selected():
            wd.find_element_by_xpath(
                "//div[@id='content']/form/select[4]//option[%s]" % str(anniversaryDate.month + 1)).click()
        # Fill in year
        self.app.change_field_value("ayear", anniversaryDate.year)

    def update_anniversary_date(self, anniversaryDate):
        wd = self.app.wd
        # Choose day
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[3]//option[%s]" % str(anniversaryDate.day + 2)).is_selected():
            wd.find_element_by_xpath(
                "//div[@id='content']/form/select[3]//option[%s]" % str(anniversaryDate.day + 2)).click()
        # Choose month
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[4]//option[%s]" % str(anniversaryDate.month + 2)).is_selected():
            wd.find_element_by_xpath(
                "//div[@id='content']/form/select[4]//option[%s]" % str(anniversaryDate.month + 2)).click()
        # Fill in year
        self.app.change_field_value("ayear", anniversaryDate.year)

    def fill_birth_date(self, birthDate):
        wd = self.app.wd
        # Choose day
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[1]//option[%s]" % str(birthDate.day + 2)).is_selected():
            wd.find_element_by_xpath(
                "//div[@id='content']/form/select[1]//option[%s]" % str(birthDate.day + 2)).click()
        # Choose month
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[2]//option[%s]" % str(birthDate.month + 1)).is_selected():
            wd.find_element_by_xpath(
                "//div[@id='content']/form/select[2]//option[%s]" % str(birthDate.month + 1)).click()
        # Fill in year
        self.app.change_field_value("byear", birthDate.year)

    def update_birth_date(self, birthDate):
        wd = self.app.wd
        # Choose day
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[1]//option[%s]" % str(birthDate.day + 2)).is_selected():
            wd.find_element_by_xpath(
                "//div[@id='content']/form/select[1]//option[%s]" % str(birthDate.day + 2)).click()
        # Choose month
        if not wd.find_element_by_xpath(
                "//div[@id='content']/form/select[2]//option[%s]" % str(birthDate.month + 2)).is_selected():
            wd.find_element_by_xpath(
                "//div[@id='content']/form/select[2]//option[%s]" % str(birthDate.month + 2)).click()
        # Fill in year
        self.app.change_field_value("byear", birthDate.year)

    def fill_www_address(self, www):
        wd = self.app.wd
        self.app.change_field_value("homepage", www.www)

    def fill_emails(self, emails):
        wd = self.app.wd
        self.app.change_field_value("email", emails.email1)
        self.app.change_field_value("email2", emails.email2)
        self.app.change_field_value("email3", emails.email3)

    def fill_phone_number(self, phoneNumbers):
        wd = self.app.wd
        # Fill in home number
        self.app.change_field_value("home", phoneNumbers.home)
        self.app.change_field_value("mobile", phoneNumbers.mobile)
        self.app.change_field_value("work", phoneNumbers.work)
        self.app.change_field_value("fax", phoneNumbers.fax)

    def fill_contact_base_data(self, baseData):
        wd = self.app.wd
        self.app.change_field_value("firstname", baseData.firstname)
        self.app.change_field_value("lastname", baseData.lastname)
        # self.app.change_field_value("home", phoneNumbers.home)
        # self.app.change_field_value("mobile", phoneNumbers.mobile)
        # self.app.change_field_value("work", phoneNumbers.work)
        # self.app.change_field_value("phone2", additionalData.phone)
        # self.app.change_field_value("email", emails.email1)
        # self.app.change_field_value("email2", emails.email2)
        # self.app.change_field_value("email3", emails.email3)

    def fill_personal_data(self, personalData):
        wd = self.app.wd
        self.app.change_field_value("middlename", personalData.middlename)
        self.app.change_field_value("nickname", personalData.nickname)
        # Add photo
        # wd.find_element_by_name("photo").click()
        self.app.change_field_value("title", personalData.title)
        self.app.change_field_value("company", personalData.company)
        self.app.change_field_value("address", personalData.address)

    def init_new_contact(self):
        wd = self.app.wd
        wd.find_element_by_link_text("nowy wpis").click()

    def choose_by_id_contact(self, contact_id):
        wd = self.app.wd
        wd.find_element_by_xpath("//input[@id='%s']" % contact_id).click()

    def delete_first_contact(self):
        wd = self.app.wd
        self.delete_contact_by_index(0)

    def delete_contact_by_index(self, index):
        wd = self.app.wd
        self.open_main_page()
        # Choose contact at given index
        wd.find_elements_by_name("selected[]")[index].click()
        # Submit contact deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        # closing alert window
        wd.switch_to_alert().accept()
        self.contact_cache = None

    def delete_contact_by_id(self, contact_id):
        wd = self.app.wd
        self.open_main_page()
        self.choose_by_id_contact(contact_id)
        # Submit contact deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        # closing alert window
        wd.switch_to_alert().accept()
        self.contact_cache = None

    def delete_all_contacts(self):
        wd = self.app.wd
        self.open_main_page()
        # Choose all contacts
        # //form[@name='MainForm']/input[2]
        wd.find_element_by_xpath("//input[@id='MassCB']").click()
        # Submit contact deletion
        wd.find_element_by_xpath("//div[@id='content']/form[2]/div[2]/input").click()
        # closing alert window
        wd.switch_to_alert().accept()
        self.contact_cache = None

    def init_first_contact_edition(self):
        wd = self.app.wd
        self.init_by_index_contact_edition(0)

    def init_by_index_contact_edition(self, index):
        wd = self.app.wd
        self.open_main_page()
        wd.find_elements_by_xpath("//img[@title='Edytuj']")[index].click()

    def init_by_id_contact_edition(self, contact_id):
        wd = self.app.wd
        self.open_main_page()
        wd.find_element_by_xpath("//a[contains(@href,'edit.php?id=%s')]/img" % contact_id).click()

    def open_contact_view_by_index(self, index):
        wd = self.app.wd
        self.open_main_page()
        wd.find_elements_by_xpath("//img[@alt='Szczegóły']")[index].click()

    def update_contact_top(self):
        wd = self.app.wd
        wd.find_element_by_xpath("//input[@value='Aktualizuj'][1]").click()
        self.contact_cache = None

    def update_contact_bottom(self):
        wd = self.app.wd
        wd.find_element_by_xpath("//input[@value='Aktualizuj'][2]").click()
        self.contact_cache = None

    def delete_edited_contact(self):
        wd = self.app.wd
        wd.find_element_by_xpath("//input[@value='Usuń']").click()
        self.contact_cache = None

    def add_contact_to_group(self, contact_id, group_id):
        wd = self.app.wd
        self.open_main_page()
        # choosing contact
        self.choose_by_id_contact(contact_id)
        # choosing group from dropdown for adding
        # wd.find_element_by_xpath("//select[@name='to_group']").click()
        wd.find_element_by_xpath("//select[@name='to_group']/option[@value='%s']" % group_id).click()
        # Submit
        wd.find_element_by_xpath("//input[@name='add']").click()

    def delete_contact_from_group(self, contact_id, group_id):
        wd = self.app.wd
        self.open_main_page()
        # choosing group from dropdown for viewing contacts in the group
        wd.find_element_by_xpath("//select[@name='group']/option[@value='%s']" % group_id).click()
        # waiting for the content to refresh
        wait = WebDriverWait(wd, 10)
        wait.until(lambda d: d.find_element_by_xpath("//input[@name='remove']"))
        # choosing contact
        self.choose_by_id_contact(contact_id)
        # Submit
        wd.find_element_by_xpath("//input[@name='remove']").click()

    # counting elements on the list
    def count(self):
        wd = self.app.wd
        self.open_main_page()
        return len(wd.find_elements_by_name("selected[]"))

    contact_cache = None

    def get_contact_list(self):
        wd = self.app.wd
        self.open_main_page()
        self.contact_cache = []
        for row in wd.find_elements_by_name('entry'):
            cells = row.find_elements_by_tag_name('td')
            id = cells[0].find_element_by_tag_name('input').get_attribute('value')
            # a leading . before // makes the xpath relative - as if it opened a new DOM rooted at the row
            # text1 = element.find_element_by_xpath(".//td[2]").text
            # text2 = element.find_element_by_xpath(".//td[3]").text
            # lastName = row.find_element_by_css_selector('*>td:nth-of-type(2)').text
            # firstName = row.find_element_by_css_selector('*>td:nth-of-type(3)').text
            firstName = cells[2].text
            lastName = cells[1].text
            allPhones = cells[5].text
            allEmails = cells[4].text
            address = cells[3].text
            self.contact_cache.append(ContactBaseData(firstname=firstName, lastname=lastName, id=id,
                                                      address=address,
                                                      allPhonesFromHomePage=allPhones,
                                                      allEmailsFromHomePage=allEmails))
        return list(self.contact_cache)

    def get_contact_info_from_edit_page(self, index):
        wd = self.app.wd
        self.init_by_index_contact_edition(index)
        id = wd.find_element_by_name('id').get_attribute('value')
        firstname = wd.find_element_by_name('firstname').get_attribute('value')
        lastname = wd.find_element_by_name('lastname').get_attribute('value')
        address = wd.find_element_by_name('address').get_attribute('value')
        homephone = wd.find_element_by_name('home').get_attribute('value')
        workphone = wd.find_element_by_name('work').get_attribute('value')
        mobilephone = wd.find_element_by_name('mobile').get_attribute('value')
        additionalphone = wd.find_element_by_name('phone2').get_attribute('value')
        email1 = wd.find_element_by_name('email').get_attribute('value')
        email2 = wd.find_element_by_name('email2').get_attribute('value')
        email3 = wd.find_element_by_name('email3').get_attribute('value')
        return ContactBaseData(firstname=firstname, lastname=lastname, id=id,
                               homephone=homephone, workphone=workphone,
                               mobilephone=mobilephone, additionalphone=additionalphone,
                               email1=email1, email2=email2, email3=email3,
                               address=address)

    def get_contact_info_from_view_page(self, index):
        wd = self.app.wd
        self.open_contact_view_by_index(index)
        text = wd.find_element_by_id('content').text
        if re.search('H:\s(.*)', text) is not None:
            homephone = re.search('H:\s(.*)', text).group(1)
        else:
            homephone = None
        if re.search('W:\s(.*)', text) is not None:
            workphone = re.search('W:\s(.*)', text).group(1)
        else:
            workphone = None
        if re.search('M:\s(.*)', text) is not None:
            mobilephone = re.search('M:\s(.*)', text).group(1)
        else:
            mobilephone = None
        if re.search('P:\s(.*)', text) is not None:
            additionalphone = re.search('P:\s(.*)', text).group(1)
        else:
            additionalphone = None
        # allEmails = wd.find_elements_by_xpath("//a[starts-with(@href, 'mailto:')]")
        allEmails = []
        for i in range(0, len(wd.find_elements_by_xpath("//a[starts-with(@href, 'mailto:')]"))):
            allEmails.append(wd.find_elements_by_xpath("//a[starts-with(@href, 'mailto:')]")[i].text)
        return ContactBaseData(homephone=homephone, workphone=workphone,
                               mobilephone=mobilephone, additionalphone=additionalphone,
                               allEmailsFromHomePage=allEmails)
16
0
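The `delete_contact_from_group` helper in the record above avoids fixed sleeps by polling with `WebDriverWait` until the group view has refreshed. A minimal standalone sketch of that explicit-wait pattern, written against the same selenium-3-era API the helper uses; the browser choice, URL, and locator here are placeholders, not values from the record:

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = webdriver.Firefox()                  # placeholder browser
driver.get("http://localhost/addressbook/")   # hypothetical app URL

# Poll up to 10 seconds until the "remove" button appears in the DOM,
# instead of sleeping for a fixed amount of time.
wait = WebDriverWait(driver, 10)
button = wait.until(EC.presence_of_element_located((By.NAME, "remove")))
button.click()
driver.quit()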
092f1761576ffa817c9655201b6f11db45a1a582
2,041
py
Python
vision/library/tools/engines/text_recognition_tesseract.py
lcmonteiro/space-vision-py
38022c99218de0e1e93ec0bae8d143fa0c787f1d
[ "MIT" ]
1
2019-12-14T20:00:17.000Z
2019-12-14T20:00:17.000Z
vision/library/tools/engines/text_recognition_tesseract.py
lcmonteiro/space-vision-py
38022c99218de0e1e93ec0bae8d143fa0c787f1d
[ "MIT" ]
null
null
null
vision/library/tools/engines/text_recognition_tesseract.py
lcmonteiro/space-vision-py
38022c99218de0e1e93ec0bae8d143fa0c787f1d
[ "MIT" ]
null
null
null
# ################################################################################################
# ------------------------------------------------------------------------------------------------
# File:   text_recognition_tesseract_engine.py
# Author: Luis Monteiro
#
# Created on nov 17, 2019, 22:00 PM
# ------------------------------------------------------------------------------------------------
# ################################################################################################
# external
from pytesseract import image_to_string


# ################################################################################################
# ------------------------------------------------------------------------------------------------
# TextRecognitionTesseract
# ------------------------------------------------------------------------------------------------
# ################################################################################################
class TextRecognitionTesseract:
    #
    # -------------------------------------------------------------------------
    # initialization
    # -------------------------------------------------------------------------
    #
    def __init__(self):
        super().__init__()
        # configuration
        self.__config = ("-l eng --oem 1 --psm 7")

    #
    # -------------------------------------------------------------------------
    # process
    # -------------------------------------------------------------------------
    #
    def process(self, frame):
        # format results
        return image_to_string(frame, config=self.__config)


# ################################################################################################
# ------------------------------------------------------------------------------------------------
# End
# ------------------------------------------------------------------------------------------------
# ################################################################################################
48.595238
98
0.168055
0
0
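A minimal usage sketch for the engine above, assuming Tesseract and `pytesseract` are installed; `sample.png` is a hypothetical image containing a single line of text, which matches the `--psm 7` (treat input as one text line) configuration the class hardcodes:

from PIL import Image  # pillow; pytesseract accepts PIL images (and numpy arrays)

engine = TextRecognitionTesseract()
frame = Image.open("sample.png")  # hypothetical input image
print(engine.process(frame))      # prints the recognized line of text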
5111b400d490cda967a1cc070c3b3f72a6cd1341
685
py
Python
music_manuel/osc_server.py
HelsinkiGroup5/Hackathon
eb1c7c5f142fc3dbe83a41a558a1ab8071341d06
[ "MIT" ]
null
null
null
music_manuel/osc_server.py
HelsinkiGroup5/Hackathon
eb1c7c5f142fc3dbe83a41a558a1ab8071341d06
[ "MIT" ]
null
null
null
music_manuel/osc_server.py
HelsinkiGroup5/Hackathon
eb1c7c5f142fc3dbe83a41a558a1ab8071341d06
[ "MIT" ]
null
null
null
import OSC, time

#import rtmidi_python as rtmidi
#midi_out = rtmidi.MidiOut()
#midi_out.open_port(0)

def handler(addr, tags, data, client_address):
    txt = "OSCMessage '%s' from %s: " % (addr, client_address)
    txt += str(data)
    print(txt)
    #num = data[0]
    #print num
    #midi_out.send_message([0x90, 192, num]) # Note on
    #time.sleep(0.5)
    #midi_out.send_message([0x80, 192, num]) # Note off
    #print("midi sent")

if __name__ == "__main__":
    s = OSC.OSCServer(('10.100.7.151', 57120))  # listen on 10.100.7.151, port 57120
    s.addMsgHandler('/startup', handler)  # call handler() for OSC messages received with the /startup address
    s.serve_forever()
29.782609
113
0.655474
0
0
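The `OSC` module used above (pyOSC) only runs on Python 2. A rough Python 3 equivalent, assuming the third-party `python-osc` package as a substitute; the IP and port mirror the script above, everything else is my own sketch:

from pythonosc.dispatcher import Dispatcher
from pythonosc.osc_server import BlockingOSCUDPServer

def handler(address, *args):
    # address is the OSC address pattern, args the message payload
    print("OSCMessage '%s': %s" % (address, args))

if __name__ == "__main__":
    dispatcher = Dispatcher()
    dispatcher.map("/startup", handler)  # route /startup messages to handler()
    server = BlockingOSCUDPServer(("10.100.7.151", 57120), dispatcher)
    server.serve_forever()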
f1a11d584fb476bc84cfd89ab66c348ee5ba13cc
24,531
py
Python
store/adminshop/views/compras.py
vallemrv/my_store_test
2da624fd02c5f1784464f15b751b488f3dd2bae6
[ "Apache-2.0" ]
null
null
null
store/adminshop/views/compras.py
vallemrv/my_store_test
2da624fd02c5f1784464f15b751b488f3dd2bae6
[ "Apache-2.0" ]
null
null
null
store/adminshop/views/compras.py
vallemrv/my_store_test
2da624fd02c5f1784464f15b751b488f3dd2bae6
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
# @Author: Manuel Rodriguez <valle>
# @Date:   28-Aug-2017
# @Email:  valle.mrv@gmail.com
# @Filename: views.py
# @Last modified by:   valle
# @Last modified time: 02-Mar-2018
# @License: Apache license version 2.0

from django.forms.models import model_to_dict
from django.db.models import Q
from django.conf import settings
from django.shortcuts import render, redirect

try:
    from django.core.urlresolvers import reverse
except ImportError:
    from django.urls import reverse

from django.contrib.auth.decorators import login_required, permission_required
from django.template.loader import render_to_string
from django.http import HttpResponse
#from django.template import Context
from django.template.loader import get_template

from adminshop.utility import get_documento_compra, get_documento_testeo
from adminshop.forms import (CPClientesForm, CPProductosForm, ProductosForm,
                             MODProductosForm, FinTratoForm, ValidarCompra,
                             VistaValidarForm, ModelosForm)
from adminshop.models import (Modelos, Clientes, Testeo, ConfigSite, Historial,
                              Firmas, Productos, Compras, Tipos, Direcciones,
                              DocumentoTesteo, ListaTesteo)
from adminshop.utility import save_historial, save_doc_firmas, save_doc_testeo
from . import (validoDNI, get_first_direccion, set_first_direccion)
from tokenapi.http import JsonResponse

import threading
import base64
import json
import trml2pdf
import os


@login_required(login_url='login_tk')
def get_modificar_compra(request, id_compra):
    pres = Compras.objects.filter(pk=id_compra)
    if len(pres) > 0:
        pres = pres[0]
        vendedor = pres.get_vendedor()
        producto = pres.producto
        producto_dict = model_to_dict(producto)
        producto_dict["cliente"] = vendedor['id']
        f_compra = MODProductosForm(producto_dict)
        modelo = producto.modelo
        return render(request, "tienda/compras/modificar.html",
                      {"c": vendedor, "form": f_compra,
                       "m": modelo, "f": pres})
    return redirect("tienda")


@login_required(login_url='login_tk')
def modificar_compra(request, id_compra):
    if request.method == "POST":
        pres = Compras.objects.filter(pk=id_compra)
        if len(pres) > 0:
            pres = pres[0]
            producto = pres.producto
            producto.tipo_id = request.POST["tipo"]
            producto.color = request.POST["color"]
            producto.modelo_id = request.POST["modelo"]
            producto.ns_imei = request.POST["ns_imei"]
            producto.precio_compra = request.POST["precio_compra"]
            producto.save()
            pres.vendedor_id = request.POST["cliente"]
            pres.save()
    return HttpResponse(reverse("listado_compras"))


@login_required(login_url='login_tk')
def ch_find_modelo(request):
    if request.method == "POST":
        filter = request.POST["filter"]
        filter_query = Modelos.objects.filter(Q(nombre__contains=filter) |
                                              Q(marca__nombre__contains=filter))
        return render(request, "tienda/compras/lista_modelos.html",
                      {'query': filter_query, 'change': True})


@login_required(login_url='login_tk')
def cancelar_trato(request, id_producto):
    if request.method == "POST":
        producto = Productos.objects.get(pk=id_producto)
        f_p = FinTratoForm(request.POST, instance=producto)
        if f_p.is_valid():
            p = f_p.save()
            p.estado = "CT"
            p.save()
            clientes = Historial.objects.filter(producto_id=p.pk)
            cliente_id = 1
            if len(clientes) > 0:
                cliente_id = clientes[0].cliente_id
            # save a history entry for the action performed
            save_historial(request.user.id, p.id, cliente_id,
                           "Rechazada la compra del producto..")
            vaciar_sesison_compra(request)
        return HttpResponse(reverse("tienda"))
    else:
        p = Productos.objects.get(pk=id_producto)
        p.estado = "CT"
        p.save()
        clientes = Historial.objects.filter(producto_id=p.pk)
        cliente_id = 1
        if len(clientes) > 0:
            cliente_id = clientes[0].cliente_id
        # save a history entry for the action performed
        save_historial(request.user.id, p.id, cliente_id,
                       "Rechazada la compra del producto..")
        vaciar_sesison_compra(request)
        return redirect("lista_productos", estado="TD")


@login_required(login_url='login_tk')
def validar_compra(request, id_compra):
    if request.method == "POST":
        compra = Compras.objects.get(pk=id_compra)
        f = ValidarCompra(request.POST, instance=compra)
        if f.is_valid():
            compra = f.save()
            vaciar_sesison_compra(request)
            # queue the document so it can be signed
            save_doc_firmas(request.user.pk, compra.pk, "CP")
            return redirect("tienda")
        else:
            f = VistaValidarForm(instance=compra)
            return render(request, "tienda/compras/validar_compra.html",
                          {"form": f, "form_error": f.errors})
    else:
        compra = Compras.objects.get(pk=id_compra)
        f = VistaValidarForm(instance=compra)
        return render(request, "tienda/compras/validar_compra.html",
                      {"form": f})


@login_required(login_url='login_tk')
def send_sign(request, id_producto):
    producto = Productos.objects.get(pk=id_producto)
    compras = Compras.objects.filter(producto__id=id_producto)
    if len(compras) > 0:
        compra = compras[0]
        Firmas.objects.filter(Q(documento_id=compra.pk) &
                              Q(tipo_documento="CP")).delete()
        threading.Thread(target=send_men_sing, args=(compra,)).start()
    return render(request, "tienda/compras/sender_sign.html")


@login_required(login_url='login_tk')
def get_document_by_id(request, id_producto):
    producto = Productos.objects.get(pk=id_producto)
    compras = Compras.objects.filter(producto__id=id_producto)
    compra = Compras()
    if len(compras) > 0:
        compra = compras[0]
    return get_document(producto, compra)


@login_required(login_url='login_tk')
def find_cliente(request):
    vaciar_sesison_compra(request)
    if request.method == "POST" and "DNI" in request.POST:
        if validoDNI(request.POST["DNI"]):
            return cp_clientes(request)
        else:
            return render(request, 'tienda/compras/find_cliente.html', {
                "mensaje": "DNI no valido",
                "url_tipo": reverse("find_cliente")
            })
    return render(request, 'tienda/compras/find_cliente.html', {
        "url_tipo": reverse("find_cliente")
    })


@login_required(login_url='login_tk')
def listado_doc_testeos(request):
    testeos = DocumentoTesteo.objects.all()
    return render(request, 'tienda/testeo/listado.html', {
        "compras": testeos
    })


@login_required(login_url='login_tk')
def find_doc_testeos(request):
    filter = request.POST["filter"]
    if len(filter) > 0 and filter[0].upper() == "T":
        filter = filter.replace("T", "")
        filter = filter.replace("t", "")
        compras = DocumentoTesteo.objects.filter(Q(pk=filter))
    else:
        compras = DocumentoTesteo.objects.filter(Q(cliente__DNI__contains=filter) |
                                                 Q(cliente__nombre_completo__contains=filter))
    return render(request, 'tienda/testeo/listado_ajax.html', {
        "compras": compras
    })


@login_required(login_url='login_tk')
def get_doc_testeo_by_id(request, id_doc):
    doc = DocumentoTesteo.objects.get(pk=id_doc)
    return doc_testeo(doc)


@login_required(login_url='login_tk')
def cp_clientes(request):
    if request.method == 'POST':
        if "filter" in request.POST:
            clientes = Clientes.objects.filter(DNI__icontains=request.POST.get('DNI'))
            if len(clientes) > 0:
                direccion = get_first_direccion(clientes[0].id)
                full_data = dict(model_to_dict(direccion).items() +
                                 model_to_dict(clientes[0]).items())
                form = CPClientesForm(full_data, instance=clientes[0])
                titulo = 'Cliente existente'
                tipo = "comprar"
                request.session["accion_comprar_dni"] = request.POST.get('DNI')
                request.session["accion_comprar_pk_cliente"] = clientes[0].pk
            else:
                form = CPClientesForm(request.POST)
                titulo = 'Cliente no existe'
                tipo = "no_existe"
            return render(request, 'tienda/compras/clientes_ajax.html',
                          {'form': form, 'titulo': titulo, 'tipo': tipo})
        elif len(request.POST) == 2 and "DNI" in request.POST:
            clientes = Clientes.objects.filter(DNI__icontains=request.POST.get('DNI'))
            if len(clientes) > 0:
                direccion = get_first_direccion(clientes[0].id)
                full_data = dict(model_to_dict(direccion).items() +
                                 model_to_dict(clientes[0]).items())
                form = CPClientesForm(full_data, instance=clientes[0])
                titulo = 'Cliente existente'
                tipo = "comprar"
                request.session["accion_comprar_dni"] = request.POST.get('DNI')
                request.session["accion_comprar_pk_cliente"] = clientes[0].pk
            else:
                form = CPClientesForm(request.POST)
                titulo = 'Cliente no existe'
                tipo = "no_existe"
            return render(request, 'tienda/compras/clientes.html',
                          {'form': form, 'titulo': titulo, 'tipo': tipo})
        elif len(request.POST) > 2:
            tipo = "comprar"
            clientes = Clientes.objects.filter(DNI__icontains=request.POST.get('DNI'))
            request.session["accion_comprar_dni"] = request.POST.get('DNI')
            if len(clientes) > 0:
                form = CPClientesForm(request.POST, instance=clientes[0])
            else:
                form = CPClientesForm(request.POST)
            if form.is_valid():
                cliente = form.save()
                direccion = set_first_direccion(request.POST, cliente.pk)
                if type(direccion) == Direcciones:
                    direccion.cliente_id = cliente.pk
                    direccion.save()
                else:
                    return render(request, 'tienda/compras/clientes.html',
                                  {'form': form,
                                   'titulo': "Error al guardar el cliente",
                                   'tipo': tipo, "form_error": form.errors})
                request.session["accion_comprar_pk_cliente"] = cliente.pk
            return render(request, 'tienda/compras/clientes.html',
                          {'form': form,
                           'titulo': "Cliente guardado o modificado",
                           'tipo': tipo})
    return redirect("find_cliente")


@login_required(login_url='login_tk')
def listado_compras(request):
    compras = Compras.objects.all().exclude(tipo_vendedor="NO")
    return render(request, 'tienda/compras/listado.html', {
        "compras": compras
    })


@login_required(login_url='login_tk')
def find_compra(request):
    filter = request.POST["filter"]
    if len(filter) > 0 and "c" == filter[0].lower():
        filter = filter.replace("C", "")
        filter = filter.replace("c", "")
        compras = Compras.objects.filter(
            Q(codigo_compra__icontains=filter)).exclude(vendedor_id=None)
    else:
        compras = Compras.objects.filter(
            Q(codigo_compra__icontains=filter) |
            Q(producto__ns_imei__icontains=filter)).exclude(vendedor_id=None)
    return render(request, 'tienda/compras/listado_ajax.html', {
        "compras": compras
    })


@login_required(login_url='login_tk')
def cp_lista_modelos(request):
    if request.method == "POST":
        filter = request.POST["filter"]
        filter_query = Modelos.objects.filter(Q(nombre__icontains=filter))
        return render(request, "tienda/compras/lista_modelos.html",
                      {'query': filter_query})


@login_required(login_url='login_tk')
def send_para_tester(request, id_modelo):
    if "accion_comprar_dni" in request.session:
        try:
            producto = Productos.objects.get(ns_imei=request.POST.get("ns_imei"))
            form = CPProductosForm(request.POST, instance=producto)
        except Exception as p:
            form = CPProductosForm(request.POST)
        if form.is_valid():
            producto = form.save(commit=False)
            producto.modelo_id = request.session["accion_comprar_pk_modelo"]
            producto.estado = "OS"
            producto.tipo_id = 1
            producto.precio_compra = producto.modelo.precio_usado
            producto.save()
            request.session["accion_comprar_pk_producto"] = producto.pk
            # save a history entry for the action performed
            save_historial(request.user.pk,
                           request.session["accion_comprar_pk_cliente"],
                           producto.pk,
                           "Entrada para testeo posible compra")
            # create the device reception document
            doc = save_doc_testeo(request.user.pk,
                                  request.session["accion_comprar_pk_cliente"],
                                  producto.pk)
            # queue the document to be signed
            save_doc_firmas(request.user.pk, doc.id, "OS")
            vaciar_sesison_compra(request)
        return JsonResponse({"result": True})
    else:
        return redirect("tienda")


@login_required(login_url='login_tk')
def cp_productos(request, id_modelo=-1):
    if "accion_comprar_dni" in request.session:
        if request.method != "POST" and id_modelo < 0:
            f_modelo = ModelosForm()
            return render(request, 'tienda/compras/find_modelos.html',
                          {"form": f_modelo})
        elif request.method != "POST" and id_modelo > 0:
            request.session["accion_comprar_pk_modelo"] = id_modelo
            try:
                modelo = Modelos.objects.get(pk=id_modelo)
            except:
                modelo = Modelos()
            tipo = "no_existe"
            form = CPProductosForm()
            return render(request, 'tienda/compras/productos.html',
                          {'form': form, 'titulo': "Datos del producto",
                           'modelo': modelo, 'tipo': tipo})
        else:
            try:
                producto = Productos.objects.get(ns_imei=request.POST.get("ns_imei"))
                form = CPProductosForm(request.POST, instance=producto)
            except Exception as p:
                form = CPProductosForm(request.POST)
            if form.is_valid():
                producto = form.save(commit=False)
                if "accion_comprar_pk_modelo" not in request.session:
                    vaciar_sesison_compra(request)
                    return redirect("tienda")
                producto.modelo_id = request.session["accion_comprar_pk_modelo"]
                producto.estado = "TD"
                #tipos = Tipos.objects.all()
                #if len(tipos) > 0:
                #    tipo = tipos[0].pk
                #else:
                #    tipo = -1
                #producto.tipo_id = tipo
                producto.precio_compra = producto.modelo.precio_usado
                producto.save()
                request.session["accion_comprar_pk_producto"] = producto.pk
                save_historial(request.user.pk,
                               request.session["accion_comprar_pk_cliente"],
                               request.session["accion_comprar_pk_producto"],
                               "Producto comprado sin testear")
            form = ProductosForm(instance=producto)
            return render(request, 'tienda/compras/compras.html',
                          {'form': form, 'titulo': "Datos del producto",
                           "form_error": form.errors,
                           "id_modelo": request.session["accion_comprar_pk_modelo"]})
    else:
        return redirect("tienda")


@login_required(login_url='login_tk')
def calcular_precio_usado(request, id_modelo):
    if request.method == "POST":
        tipo = Tipos.objects.get(pk=request.POST["tipo"])
        modelo = Modelos.objects.get(pk=id_modelo)
        return HttpResponse("{0:.2f}".format(float(tipo.incremento) *
                                             float(modelo.precio_usado)))
    else:
        return redirect("tienda")


@login_required(login_url='login_tk')
def hacer_compra(request):
    if request.method == "POST":
        try:
            producto = Productos.objects.get(pk=request.session["accion_comprar_pk_producto"])
            producto.tipo_id = request.POST["tipo"]
            producto.precio_compra = request.POST["precio_compra"]
            producto.estado = "ST"
            producto.save()
        except Exception as error:
            return HttpResponse(reverse("en_construccion"))
        estan_todos = True
        estan_todos = estan_todos and "accion_comprar_pk_cliente" in request.session
        estan_todos = estan_todos and "accion_comprar_pk_producto" in request.session
        estan_todos = estan_todos and "accion_comprar_pk_modelo" in request.session
        if estan_todos:
            compra = guardar_compra(request.session["accion_comprar_pk_cliente"],
                                    request.session["accion_comprar_pk_producto"],
                                    request.user.id,
                                    "Realizada la compra del producto")
            return HttpResponse(reverse("validar_compra", args=[str(compra.id)]))
        else:
            return HttpResponse(reverse("tienda"))


@login_required(login_url='login_tk')
def trato_compra(request, id_producto):
    if request.method == "POST":
        producto = Productos.objects.get(pk=id_producto)
        f_p = FinTratoForm(request.POST, instance=producto)
        if f_p.is_valid():
            p = f_p.save()
            p.estado = "ST"
            p.save()
            clientes = Historial.objects.filter(producto_id=p.pk)
            cliente_id = 1
            if len(clientes) > 0:
                cliente_id = clientes[0].cliente_id
            compra = guardar_compra(cliente_id, p.id, request.user.id,
                                    "Realizada la compra del producto. Despues de testear")
        return HttpResponse(reverse("validar_compra", args=[compra.id]))
    else:
        producto = Productos.objects.get(pk=id_producto)
        if producto.tipo == None:
            producto.tipo = Tipos.objects.all()[0]
        producto.precio_compra = "{0:.2f}".format(producto.modelo.precio_usado *
                                                  producto.tipo.incremento)
        producto.save()
        filter_query = Testeo.objects.filter(producto_id=id_producto)
        lista_ids = filter_query.values_list("descripcion_id", flat=True)
        no_realizaos = ListaTesteo.objects.filter(categoria=producto.modelo.categoria)
        return render(request, "tienda/compras/trato_compra.html",
                      {'query': filter_query.exclude(estado="OK"),
                       "p": producto,
                       "no_realizados": no_realizaos.exclude(pk__in=lista_ids),
                       "form": FinTratoForm(instance=producto)})


@login_required(login_url='login_tk')
def cancelar_compra(request):
    if request.method == "POST":
        try:
            producto = Productos.objects.get(pk=request.session["accion_comprar_pk_producto"])
            producto.tipo_id = request.POST["tipo"]
            producto.precio_compra = request.POST["precio_compra"]
            producto.estado = "CT"
            producto.save()
        except:
            return HttpResponse(reverse("tienda"))
        estan_todos = True
        estan_todos = estan_todos and "accion_comprar_pk_cliente" in request.session
        estan_todos = estan_todos and "accion_comprar_pk_producto" in request.session
        estan_todos = estan_todos and "accion_comprar_pk_modelo" in request.session
        if estan_todos:
            # save a history entry for the purchase cancellation
            save_historial(request.user.id,
                           request.session["accion_comprar_pk_cliente"],
                           request.session["accion_comprar_pk_producto"],
                           "Compra cancelada, producto en posesion del cliente")
            vaciar_sesison_compra(request)
            return HttpResponse(reverse("tienda"))
        else:
            return HttpResponse(reverse("en_construccion"))


@login_required(login_url='login_tk')
def salir_compra(request):
    try:
        producto = Productos.objects.get(pk=request.session["accion_comprar_pk_producto"])
        producto.estado = "CT"
        producto.save()
    except:
        pass
    vaciar_sesison_compra(request)
    return redirect("tienda")


def guardar_compra(cliente_id, producto_id, user_id, detalle):
    compra = Compras()
    compra.vendedor_id = cliente_id
    compra.tipo_vendedor = 'CL'
    compra.producto_id = producto_id
    compra.usuario_id = user_id
    compra.save()
    # save the history entry
    save_historial(user_id, cliente_id, user_id, detalle)
    return compra


def vaciar_sesison_compra(request):
    if "accion_comprar_pk_cliente" in request.session:
        del request.session["accion_comprar_pk_cliente"]
    if "accion_comprar_pk_producto" in request.session:
        del request.session["accion_comprar_pk_producto"]
    if "accion_comprar_pk_modelo" in request.session:
        del request.session["accion_comprar_pk_modelo"]
    if "accion_comprar_dni" in request.session:
        del request.session["accion_comprar_dni"]


def get_document_by_code(request, code):
    datos = json.loads(base64.b64decode(code))
    compras = Compras.objects.filter(pk=datos["id_compra"])
    compra = Compras()
    if len(compras) > 0:
        compra = compras[0]
        producto = Productos.objects.get(pk=compra.producto.pk)
        return get_document(producto, compra)
    return redirect('https://google.es')


def get_document(producto, compra):
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'inline; filename="%s.pdf"' % producto.modelo
    doc_compra = get_documento_compra(producto, compra)
    response.write(doc_compra.getvalue())
    return response


def send_men_sing(compra):
    vendedor = compra.get_vendedor()
    datos = {
        "id_compra": compra.id,
        "codigo_compra": str(compra.codigo_compra),
        "email": vendedor['email'],
    }
    send_data = base64.b64encode(json.dumps(datos))
    url = settings.BASE_URL + reverse("sign_compra", args=[send_data])
    from django.core.mail import send_mail
    from django.template.loader import render_to_string
    msg_plain = render_to_string(settings.BASE_DIR + '/templates/email/url_sign.html',
                                 {'nombre': vendedor['nombre'], "url": url})
    send_mail(
        'Firmar y aceptar condiciones',
        msg_plain,
        "info@freakmedia.es",
        [datos['email']],
    )


def sign_compra(request, code):
    datos = json.loads(base64.b64decode(code))
    compras = Compras.objects.filter(pk=datos["id_compra"])
    datos_send = None
    if len(compras) > 0:
        compra = compras[0]
        if compra.firma == '':
            vendedor = compra.get_vendedor()
            datos_send = {
                "pk": datos["id_compra"],
                "id_producto": compra.producto.pk,
                "nombre": vendedor["nombre"],
                "telefono": vendedor['telefono'],
                "DNI": vendedor["DNI"].upper(),
                "domicilio": vendedor['direccion'],
                "ns_imei": compra.producto.ns_imei,
                "precio_compra": str(compra.producto.precio_compra),
                "code": code
            }
            return render(request, "tienda/compras/sign.html", {"datos": datos_send})
        else:
            return redirect("get_document_by_code", code=code)
    return redirect('tienda')


def doc_testeo(doc):
    tmpl_path = settings.DOCUMENT_TMPL
    response = HttpResponse(content_type='application/pdf')
    response['Content-Disposition'] = 'inline; filename="testeo_%s.pdf"' % doc.producto
    pdfstr = get_documento_testeo(doc)
    response.write(pdfstr.getvalue())
    return response
40.148936
107
0.61832
2
0
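The `send_men_sing`/`sign_compra` pair in the record above round-trips a dict through `json.dumps` and `base64.b64encode`, which only works on Python 2, where `str` is bytes; on Python 3, `b64encode` rejects a `str` argument. A minimal Python 3 sketch of the same encode/decode round trip; the sample payload values are placeholders, and `urlsafe_b64encode` is my substitution to keep the token safe for use in a URL:

import base64
import json

datos = {"id_compra": 42, "codigo_compra": "C-0042", "email": "user@example.com"}  # sample payload

# encode: dict -> JSON str -> UTF-8 bytes -> URL-safe base64 -> ASCII str
token = base64.urlsafe_b64encode(json.dumps(datos).encode("utf-8")).decode("ascii")

# decode: reverse the steps (b64decode accepts str or bytes)
restored = json.loads(base64.urlsafe_b64decode(token).decode("utf-8"))
assert restored == datos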
0ce423168f0c5ada928e6758dd88ef4eab04b0fb
431
py
Python
November/Unique Morse Code Words.py
parikshitgupta1/leetcode
eba6c11740dc7597204af127c0f4c2163376294f
[ "MIT" ]
null
null
null
November/Unique Morse Code Words.py
parikshitgupta1/leetcode
eba6c11740dc7597204af127c0f4c2163376294f
[ "MIT" ]
null
null
null
November/Unique Morse Code Words.py
parikshitgupta1/leetcode
eba6c11740dc7597204af127c0f4c2163376294f
[ "MIT" ]
null
null
null
class Solution(object):
    def uniqueMorseRepresentations(self, words):
        MORSE = [".-", "-...", "-.-.", "-..", ".", "..-.", "--.",
                 "....", "..", ".---", "-.-", ".-..", "--", "-.",
                 "---", ".--.", "--.-", ".-.", "...", "-", "..-",
                 "...-", ".--", "-..-", "-.--", "--.."]
        seen = {"".join(MORSE[ord(c) - ord('a')] for c in word)
                for word in words}
        return len(seen)
35.916667
63
0.278422
0
0
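A quick check of the solution above on the classic example input: "gin" and "zen" both encode to "--...-.", and "gig" and "msg" both encode to "--...--.", so only two distinct transformations remain.

solution = Solution()
print(solution.uniqueMorseRepresentations(["gin", "zen", "gig", "msg"]))  # -> 2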
2a17da60768a072018629688624a0b3345cf9f9d
16,285
py
Python
tests/unit/test/plan/grammar/test_assertions.py
arareko/pysoa
a90e428558500cf692f7f6e33fd358dd2779c328
[ "Apache-2.0" ]
91
2017-05-08T22:41:33.000Z
2022-02-09T11:37:07.000Z
tests/unit/test/plan/grammar/test_assertions.py
arareko/pysoa
a90e428558500cf692f7f6e33fd358dd2779c328
[ "Apache-2.0" ]
63
2017-06-14T20:08:49.000Z
2021-06-16T23:08:25.000Z
tests/unit/test/plan/grammar/test_assertions.py
arareko/pysoa
a90e428558500cf692f7f6e33fd358dd2779c328
[ "Apache-2.0" ]
26
2017-10-13T23:23:13.000Z
2022-01-11T16:58:17.000Z
from __future__ import (
    absolute_import,
    unicode_literals,
)

import unittest

from pysoa.common.errors import Error
from pysoa.test.plan.grammar import assertions
from pysoa.test.plan.grammar.data_types import AnyValue


# noinspection PyTypeChecker
class TestCustomAssertions(unittest.TestCase):
    def test_assert_not_wanted_full_match(self):
        with self.assertRaises(AssertionError):
            assertions.assert_not_expected(
                {
                    'foo': 'bar',
                    'blah': ['aa', 'bb'],
                },
                {
                    'foo': 'bar',
                    'blah': ['aa', 'bb'],
                },
            )

    def test_assert_not_wanted_complete_mismatch(self):
        assertions.assert_not_expected(
            {
                'foo': 'bar',
                'blah': ['aa', 'bb'],
            },
            {
                'zoom': 'bar',
            },
        )

    def test_assert_not_wanted_partial_match(self):
        with self.assertRaises(AssertionError):
            assertions.assert_not_expected(
                {
                    'foo': 'bar',
                    'blah': ['aa', 'bb'],
                },
                {
                    'blah': ['bb']
                },
            )

    def test_assert_not_wanted_errors_array_empty(self):
        assertions.assert_actual_list_not_subset(
            [Error(code='INVALID', message=AnyValue('str'), field=AnyValue('str', permit_none=True))],  # type: ignore
            [],
        )

    def test_assert_not_wanted_errors_mismatch_list(self):
        assertions.assert_actual_list_not_subset(
            [
                Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
            ],
            [
                Error(code='BAZ', message='Baz message', field=None),
            ],
        )

    def test_assert_not_wanted_errors_match_list_no_field(self):
        with self.assertRaises(AssertionError):
            assertions.assert_actual_list_not_subset(
                [
                    Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                    Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                ],
                [
                    Error(code='BAR', message='Bar message', field=None),
                ],
            )

    def test_assert_not_wanted_errors_match_list_with_field(self):
        with self.assertRaises(AssertionError):
            assertions.assert_actual_list_not_subset(
                [
                    Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                    Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                ],
                [
                    Error(code='FOO', message='Foo message', field='foo_field'),
                ],
            )

    def test_assert_not_wanted_errors_match_list_with_field_and_extras(self):
        with self.assertRaises(AssertionError):
            assertions.assert_actual_list_not_subset(
                [
                    Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                    Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                ],
                [
                    Error(code='FOO', message='Foo message', field='foo_field'),
                    Error(code='BAZ', message='Baz message', field=None),
                ],
            )

    def test_assert_not_wanted_errors_mismatch_message(self):
        assertions.assert_actual_list_not_subset(
            [
                Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                Error(code='BAR', message='Bar message', field=AnyValue('str', permit_none=True)),  # type: ignore
            ],
            [
                Error(code='BAR', message='Qux message', field=None),
            ],
        )

    def test_assert_not_wanted_errors_mismatch_field(self):
        assertions.assert_actual_list_not_subset(
            [
                Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                Error(code='BAR', message=AnyValue('str'), field='bar_field'),  # type: ignore
            ],
            [
                Error(code='BAR', message='Bar message', field=None),
            ],
        )

    def test_assert_all_wanted_errors_mismatch_empty_list(self):
        with self.assertRaises(AssertionError):
            assertions.assert_lists_match_any_order(
                [
                    Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                    Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                ],
                [],
            )

    def test_assert_all_wanted_errors_mismatch_empty_list_other_way(self):
        with self.assertRaises(AssertionError):
            assertions.assert_lists_match_any_order(
                [],
                [
                    Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                    Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                ],
            )

    def test_assert_all_wanted_errors_mismatch_missing_error(self):
        with self.assertRaises(AssertionError):
            assertions.assert_lists_match_any_order(
                [
                    Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                    Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                ],
                [
                    Error(code='BAR', message='Bar message', field=None),
                ],
            )

    def test_assert_all_wanted_errors_match_same_order(self):
        assertions.assert_lists_match_any_order(
            [
                Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
            ],
            [
                Error(code='FOO', message='Foo message', field='foo_field'),
                Error(code='BAR', message='Bar message', field=None),
            ],
        )

    def test_assert_all_wanted_errors_match_different_order(self):
        assertions.assert_lists_match_any_order(
            [
                Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
            ],
            [
                Error(code='BAR', message='Bar message', field=None),
                Error(code='FOO', message='Foo message', field='foo_field'),
            ],
        )

    def test_assert_any_wanted_error_mismatch_empty_actual_list(self):
        with self.assertRaises(AssertionError):
            assertions.assert_expected_list_subset_of_actual(
                [
                    Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                    Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                ],
                [],
            )

    def test_assert_any_wanted_error_mismatch_code(self):
        with self.assertRaises(AssertionError):
            assertions.assert_expected_list_subset_of_actual(
                [
                    Error(code='BAZ', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                ],
                [
                    Error(code='FOO', message='Foo message', field='foo_field'),
                    Error(code='BAR', message='Bar Message', field=None),
                ],
            )

    def test_assert_any_wanted_error_match(self):
        assertions.assert_expected_list_subset_of_actual(
            [
                Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
            ],
            [
                Error(code='FOO', message='Foo message', field='foo_field'),
                Error(code='BAR', message='Bar message', field=None),
            ],
        )

    def test_assert_any_wanted_error_match_with_field(self):
        assertions.assert_expected_list_subset_of_actual(
            [
                Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
            ],
            [
                Error(code='FOO', message='Foo message', field='foo_field'),
                Error(code='BAR', message='Bar message', field=None),
            ],
        )

    def test_assert_any_wanted_error_match_with_field_multiples(self):
        assertions.assert_expected_list_subset_of_actual(
            [
                Error(code='FOO', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
                Error(code='BAR', message=AnyValue('str'), field=AnyValue('str', permit_none=True)),  # type: ignore
            ],
            [
                Error(code='FOO', message='Foo message', field='foo_field'),
                Error(code='BAR', message='Bar message', field=None),
                Error(code='BAZ', message='Baz message', field=None),
            ],
        )

    def test_assert_subset_structure_none(self):
        assertions.assert_subset_structure(
            {'foo': None},
            {'foo': None},
            subset_lists=True,
        )

    def test_assert_subset_structure_extras(self):
        assertions.assert_subset_structure(
            {'foo': 'bar'},
            {'foo': 'bar', 'baz': 'qux'},
            subset_lists=True,
        )

    def test_assert_subset_structure_mismatch(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_subset_structure(
                {'foo': None},
                {'foo': 'bar'},
                subset_lists=True,
                msg='Include this in the message',
            )

        self.assertTrue(error_info.exception.args[0].startswith('Include this in the message'))
        self.assertIn('DATA ERROR', error_info.exception.args[0])
        self.assertIn('Mismatch values', error_info.exception.args[0])

    def test_assert_subset_structure_missing(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_subset_structure(
                {'foo': None},
                {'baz': 'qux'},
                subset_lists=True,
            )

        self.assertNotIn('DATA ERROR', error_info.exception.args[0])
        self.assertIn('Missing values', error_info.exception.args[0])

    def test_assert_subset_structure_empty_list_not_empty(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_subset_structure(
                {'foo': {'bar': []}},
                {'foo': {'bar': ['baz', 'qux']}},
                subset_lists=True,
            )

        self.assertNotIn('DATA ERROR', error_info.exception.args[0])
        self.assertIn('Mismatch values', error_info.exception.args[0])

    def test_assert_subset_structure_list_not_exact(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_subset_structure(
                {'foo': {'bar': ['baz', 'qux', 'flem']}},
                {'foo': {'bar': ['baz', 'qux']}},
            )

        self.assertNotIn('DATA ERROR', error_info.exception.args[0])
        self.assertIn('Missing values', error_info.exception.args[0])

    def test_assert_subset_structure_one_item_not_subset_of_actual_list(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_subset_structure(
                {'foo': {'bar': 'flem'}},
                {'foo': {'bar': ['baz', 'qux']}},
                subset_lists=True,
            )

        self.assertNotIn('DATA ERROR', error_info.exception.args[0])
        self.assertIn('Missing values', error_info.exception.args[0])

    def test_assert_subset_structure_one_item_subset_of_actual_list(self):
        assertions.assert_subset_structure(
            {'foo': {'bar': 'baz'}},
            {'foo': {'bar': ['baz', 'qux']}},
            subset_lists=True,
        )

    def test_assert_not_present_but_present(self):
        with self.assertRaises(AssertionError):
            assertions.assert_not_present(
                {'foo': AnyValue('str')},
                {'foo': 'Hello', 'bar': 42},
            )

    def test_assert_not_present_but_present_sub_structure(self):
        with self.assertRaises(AssertionError):
            assertions.assert_not_present(
                {'user': {'foo': AnyValue('str')}},
                {'user': {'foo': 'Hello', 'bar': 42}},
            )

    def test_assert_not_present_not_present(self):
        assertions.assert_not_present(
            {'foo': AnyValue('str')},
            {'bar': 42},
        )

    def test_assert_not_present_not_present_sub_structure(self):
        assertions.assert_not_present(
            {'user': {'foo': AnyValue('str')}},
            {'user': {'bar': 42}},
        )

    def test_assert_exact_structure_mismatch(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_exact_structure(
                {'user': {'id': 12, 'name': AnyValue('str')}, 'parent': {'id': AnyValue('int'), 'name': 'Roger'}},
                {'user': {'id': 12, 'name': 'Seth'}, 'parent': {'id': 79, 'name': 'Betty'}},
            )

        self.assertIn('Mismatch values', error_info.exception.args[0])

    def test_assert_exact_structure_missing(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_exact_structure(
                {'user': {'id': 12, 'name': AnyValue('str')}, 'parent': {'id': AnyValue('int'), 'name': 'Roger'}},
                {'user': {'id': 12, 'name': 'Seth'}, 'parent': {'name': 'Roger'}},
            )

        self.assertIn('Missing values', error_info.exception.args[0])

    def test_assert_exact_structure_extra(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_exact_structure(
                {'user': {'id': 12, 'name': AnyValue('str')}, 'parent': {'id': AnyValue('int'), 'name': 'Roger'}},
                {'user': {'id': 12, 'name': 'Seth'}, 'parent': {'id': 79, 'name': 'Roger', 'age': 65}},
            )

        self.assertIn('Extra values', error_info.exception.args[0])

    def test_assert_exact_structure_non_empty(self):
        with self.assertRaises(AssertionError) as error_info:
            assertions.assert_exact_structure(
                {'user': {'id': 12, 'name': AnyValue('str')}, 'parent': {}},
                {'user': {'id': 12, 'name': 'Seth'}, 'parent': {'id': 79}},
            )

        self.assertIn('Extra values', error_info.exception.args[0])

    def test_assert_exact_structure_match(self):
        assertions.assert_exact_structure(
            {'user': {'id': 12, 'name': AnyValue('str')}, 'parent': {'id': AnyValue('int'), 'name': 'Roger'}},
            {'user': {'id': 12, 'name': 'Seth'}, 'parent': {'id': 79, 'name': 'Roger'}},
        )

    def test_assert_exact_structure_list_mismatch(self):
        with self.assertRaises(AssertionError):
            assertions.assert_exact_structure(
                {'user': {'id': 12, 'name': AnyValue('str')}, 'parents': [79, 86]},
                {'user': {'id': 12, 'name': 'Seth'}, 'parents': [79, 86, 51]},
            )
41.649616
120
0.55413
0
0
0ba1914cfcee88af2ca20a9d79b917b79305d0c5
5,248
py
Python
lbry/tests/integration/test_wallet_server_sessions.py
Nykseli/lbry-sdk
07afc0aa0a1e6c0ef6aa284fb47513af940440c1
[ "MIT" ]
null
null
null
lbry/tests/integration/test_wallet_server_sessions.py
Nykseli/lbry-sdk
07afc0aa0a1e6c0ef6aa284fb47513af940440c1
[ "MIT" ]
4
2020-10-27T21:53:05.000Z
2022-02-11T03:10:54.000Z
lbry/tests/integration/test_wallet_server_sessions.py
braveheart12/lbry-sdk
dc709b468f9dce60d206161785def5c7ace2b763
[ "MIT" ]
null
null
null
import asyncio
import socket
import time
import logging
from unittest.mock import Mock

from torba.testcase import IntegrationTestCase, Conductor

import lbry.wallet
from lbry.schema.claim import Claim
from lbry.wallet.transaction import Transaction, Output
from lbry.wallet.dewies import dewies_to_lbc as d2l, lbc_to_dewies as l2d

log = logging.getLogger(__name__)


def wrap_callback_event(fn, callback):
    def inner(*a, **kw):
        callback()
        return fn(*a, **kw)
    return inner


class TestSessionBloat(IntegrationTestCase):
    """
    ERROR:asyncio:Fatal read error on socket transport
    protocol: <lbrynet.wallet.server.session.LBRYElectrumX object at 0x7f7e3bfcaf60>
    transport: <_SelectorSocketTransport fd=3236 read=polling write=<idle, bufsize=0>>
    Traceback (most recent call last):
      File "/usr/lib/python3.7/asyncio/selector_events.py", line 801, in _read_ready__data_received
        data = self._sock.recv(self.max_size)
    TimeoutError: [Errno 110] Connection timed out
    """
    LEDGER = lbry.wallet

    async def asyncSetUp(self):
        self.conductor = Conductor(
            ledger_module=self.LEDGER, manager_module=self.MANAGER, verbosity=self.VERBOSITY
        )
        await self.conductor.start_blockchain()
        self.addCleanup(self.conductor.stop_blockchain)

        await self.conductor.start_spv()
        self.session_manager = self.conductor.spv_node.server.session_mgr
        self.session_manager.servers['TCP'].sockets[0].setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 64)
        self.session_manager.servers['TCP'].sockets[0].setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 64)
        self.addCleanup(self.conductor.stop_spv)

        await self.conductor.start_wallet()
        self.addCleanup(self.conductor.stop_wallet)

        self.client_session = list(self.session_manager.sessions)[0]
        self.client_session.transport.set_write_buffer_limits(0, 0)
        self.paused_session = asyncio.Event(loop=self.loop)
        self.resumed_session = asyncio.Event(loop=self.loop)

        def paused():
            self.resumed_session.clear()
            self.paused_session.set()

        def delayed_resume():
            self.paused_session.clear()
            time.sleep(1)
            self.resumed_session.set()

        self.client_session.pause_writing = wrap_callback_event(self.client_session.pause_writing, paused)
        self.client_session.resume_writing = wrap_callback_event(self.client_session.resume_writing, delayed_resume)

        self.blockchain = self.conductor.blockchain_node
        self.wallet_node = self.conductor.wallet_node
        self.manager = self.wallet_node.manager
        self.ledger = self.wallet_node.ledger
        self.wallet = self.wallet_node.wallet
        self.account = self.wallet_node.wallet.default_account

    async def test_session_bloat_from_socket_timeout(self):
        await self.account.ensure_address_gap()
        address1, address2 = await self.account.receiving.get_addresses(limit=2, only_usable=True)
        sendtxid1 = await self.blockchain.send_to_address(address1, 5)
        sendtxid2 = await self.blockchain.send_to_address(address2, 5)
        await self.blockchain.generate(1)
        await asyncio.wait([
            self.on_transaction_id(sendtxid1),
            self.on_transaction_id(sendtxid2)
        ])
        self.assertEqual(d2l(await self.account.get_balance()), '10.0')

        channel = Claim()
        channel_txo = Output.pay_claim_name_pubkey_hash(
            l2d('1.0'), '@bar', channel, self.account.ledger.address_to_hash160(address1)
        )
        channel_txo.generate_channel_private_key()
        channel_txo.script.generate()
        channel_tx = await Transaction.create([], [channel_txo], [self.account], self.account)

        stream = Claim()
        stream.stream.description = "0" * 8000
        stream_txo = Output.pay_claim_name_pubkey_hash(
            l2d('1.0'), 'foo', stream, self.account.ledger.address_to_hash160(address1)
        )
        stream_tx = await Transaction.create([], [stream_txo], [self.account], self.account)
        stream_txo.sign(channel_txo)
        await stream_tx.sign([self.account])

        self.paused_session.clear()
        self.resumed_session.clear()

        await self.broadcast(channel_tx)
        await self.broadcast(stream_tx)
        await asyncio.wait_for(self.paused_session.wait(), 2)
        self.assertEqual(1, len(self.session_manager.sessions))

        real_sock = self.client_session.transport._extra.pop('socket')
        mock_sock = Mock(spec=socket.socket)
        for attr in dir(real_sock):
            if not attr.startswith('__'):
                setattr(mock_sock, attr, getattr(real_sock, attr))

        def recv(*a, **kw):
            raise TimeoutError("[Errno 110] Connection timed out")

        mock_sock.recv = recv
        self.client_session.transport._sock = mock_sock
        self.client_session.transport._extra['socket'] = mock_sock
        self.assertFalse(self.resumed_session.is_set())
        self.assertFalse(self.session_manager.session_event.is_set())
        await self.session_manager.session_event.wait()
        self.assertEqual(0, len(self.session_manager.sessions))
39.458647
116
0.695122
0
0
e76f76edb55be5c2cbb5a4d75f8d67fbe7d90f8d
1,924
py
Python
northwind.py
valogonor/DS-Unit-3-Sprint-2-SQL-and-Databases
07c83195c4933d0ce02f431692fe970ef154cacf
[ "MIT" ]
null
null
null
northwind.py
valogonor/DS-Unit-3-Sprint-2-SQL-and-Databases
07c83195c4933d0ce02f431692fe970ef154cacf
[ "MIT" ]
null
null
null
northwind.py
valogonor/DS-Unit-3-Sprint-2-SQL-and-Databases
07c83195c4933d0ce02f431692fe970ef154cacf
[ "MIT" ]
null
null
null
import sqlite3

conn = sqlite3.connect('northwind_small.sqlite3')
curs = conn.cursor()

query = '''SELECT ProductName FROM Product
ORDER BY UnitPrice DESC
LIMIT 10'''
curs.execute(query)
results = curs.fetchall()
print('Ten most expensive items (per unit price):')
for result in results:
    print(result[0])

query = '''SELECT avg(HireDate - BirthDate) FROM Employee'''
curs.execute(query)
print('Average age of an employee at the time of their hiring:', curs.fetchall()[0][0])

query = '''SELECT City, avg(HireDate - BirthDate) as Age FROM Employee
GROUP BY City'''
curs.execute(query)
print('Average age of an employee at the time of their hiring by city:')
results = curs.fetchall()
for result in results:
    print(result[0], result[1])

query = '''SELECT ProductName, CompanyName FROM Product
INNER JOIN Supplier ON Product.SupplierId = Supplier.Id
ORDER BY UnitPrice DESC
LIMIT 10'''
curs.execute(query)
results = curs.fetchall()
print('Ten most expensive items (per unit price) and their suppliers:')
print('Product', 'Supplier', sep='\t\t\t')
for result in results:
    if len(result[0]) > 15:
        sep = '\t'
    else:
        sep = '\t\t'
    print(result[0], result[1], sep=sep)

query = '''SELECT CategoryName, count(Product.Id) as ProductCount FROM Category
INNER JOIN Product ON Category.Id = Product.CategoryId
GROUP BY CategoryId
ORDER BY ProductCount DESC
LIMIT 1'''
curs.execute(query)
print('Largest category (by number of products in it):', curs.fetchall()[0][0])

query = '''SELECT LastName, FirstName, count(Territory.TerritoryDescription) as TerritoryCount
FROM Employee, Territory
JOIN EmployeeTerritory ON Employee.Id = EmployeeTerritory.EmployeeId
GROUP BY Employee.Id
ORDER BY TerritoryCount DESC
LIMIT 1'''
curs.execute(query)
results = curs.fetchall()
print('Employee with the most territories, and number of territories they have:',
      results[0][1], results[0][0] + ';', results[0][2])
30.539683
94
0.730249
0
0
e6e666e5f27d346d3402248f6886267f86c56baa
10,930
py
Python
skfda/representation/basis/_basis.py
alejandro-ariza/scikit-fda
a3626eeaac81aac14660233ff7554ae9a1550434
[ "BSD-3-Clause" ]
null
null
null
skfda/representation/basis/_basis.py
alejandro-ariza/scikit-fda
a3626eeaac81aac14660233ff7554ae9a1550434
[ "BSD-3-Clause" ]
null
null
null
skfda/representation/basis/_basis.py
alejandro-ariza/scikit-fda
a3626eeaac81aac14660233ff7554ae9a1550434
[ "BSD-3-Clause" ]
null
null
null
"""Module for functional data manipulation in a basis system. Defines functional data object in a basis function system representation and the corresponding basis classes. """ import copy import warnings from abc import ABC, abstractmethod from typing import Tuple import numpy as np from ..._utils import _domain_range, _reshape_eval_points, _same_domain from . import _fdatabasis def _check_domain(domain_range): for domain in domain_range: if len(domain) != 2 or domain[0] >= domain[1]: raise ValueError(f"The interval {domain} is not well-defined.") class Basis(ABC): """Defines the structure of a basis function system. Attributes: domain_range (tuple): a tuple of length 2 containing the initial and end values of the interval over which the basis can be evaluated. n_basis (int): number of functions in the basis. """ def __init__(self, *, domain_range=None, n_basis: int = 1): """Basis constructor. Args: domain_range (tuple or list of tuples, optional): Definition of the interval where the basis defines a space. Defaults to (0,1). n_basis: Number of functions that form the basis. Defaults to 1. """ if domain_range is not None: domain_range = _domain_range(domain_range) # Some checks _check_domain(domain_range) if n_basis < 1: raise ValueError( "The number of basis has to be strictly positive.", ) self._domain_range = domain_range self._n_basis = n_basis super().__init__() def __call__(self, *args, **kwargs) -> np.ndarray: """Evaluate the basis using :meth:`evaluate`.""" return self.evaluate(*args, **kwargs) @property def dim_domain(self) -> int: return 1 @property def dim_codomain(self) -> int: return 1 @property def domain_range(self) -> Tuple[Tuple[float, float], ...]: if self._domain_range is None: return ((0, 1),) * self.dim_domain else: return self._domain_range @property def n_basis(self) -> int: return self._n_basis @abstractmethod def _evaluate(self, eval_points) -> np.ndarray: """Subclasses must override this to provide basis evaluation.""" pass def evaluate(self, eval_points, *, derivative: int = 0) -> np.ndarray: """Evaluate Basis objects and its derivatives. Evaluates the basis function system or its derivatives at a list of given values. Args: eval_points (array_like): List of points where the basis is evaluated. Returns: Matrix whose rows are the values of the each basis function or its derivatives at the values specified in eval_points. """ if derivative < 0: raise ValueError("derivative only takes non-negative values.") elif derivative != 0: warnings.warn("Parameter derivative is deprecated. Use the " "derivative function instead.", DeprecationWarning) return self.derivative(order=derivative)(eval_points) eval_points = _reshape_eval_points(eval_points, aligned=True, n_samples=self.n_basis, dim_domain=self.dim_domain) return self._evaluate(eval_points).reshape( (self.n_basis, len(eval_points), self.dim_codomain)) def __len__(self) -> int: return self.n_basis def derivative(self, *, order: int = 1) -> '_fdatabasis.FDataBasis': """Construct a FDataBasis object containing the derivative. Args: order: Order of the derivative. Defaults to 1. Returns: Derivative object. """ return self.to_basis().derivative(order=order) def _derivative_basis_and_coefs(self, coefs: np.ndarray, order: int = 1): """ Subclasses can override this to provide derivative construction. A basis can provide derivative evaluation at given points without providing a basis representation for its derivatives, although is recommended to provide both if possible. 
""" raise NotImplementedError(f"{type(self)} basis does not support " "the construction of a basis of the " "derivatives.") def plot(self, chart=None, **kwargs): """Plot the basis object or its derivatives. Args: chart (figure object, axe or list of axes, optional): figure over with the graphs are plotted or axis over where the graphs are plotted. **kwargs: keyword arguments to be passed to the fdata.plot function. Returns: fig (figure): figure object in which the graphs are plotted. """ self.to_basis().plot(chart=chart, **kwargs) def _coordinate_nonfull(self, fdatabasis, key): """ Returns a fdatagrid for the coordinate functions indexed by key. Subclasses can override this to provide coordinate indexing. The key parameter has been already validated and is an integer or slice in the range [0, self.dim_codomain. """ raise NotImplementedError("Coordinate indexing not implemented") def _coordinate(self, fdatabasis, key): """Returns a fdatagrid for the coordinate functions indexed by key.""" # Raises error if not in range and normalize key r_key = range(self.dim_codomain)[key] if isinstance(r_key, range) and len(r_key) == 0: raise IndexError("Empty number of coordinates selected") # Full fdatabasis case if (self.dim_codomain == 1 and r_key == 0) or ( isinstance(r_key, range) and len(r_key) == self.dim_codomain): return fdatabasis.copy() else: return self._coordinate_nonfull(fdatabasis=fdatabasis, key=r_key) def rescale(self, domain_range=None): r"""Return a copy of the basis with a new :term:`domain` range, with the corresponding values rescaled to the new bounds. Args: domain_range (tuple, optional): Definition of the interval where the basis defines a space. Defaults uses the same as the original basis. """ return self.copy(domain_range=domain_range) def copy(self, domain_range=None): """Basis copy""" new_copy = copy.deepcopy(self) if domain_range is not None: domain_range = _domain_range(domain_range) # Some checks _check_domain(domain_range) new_copy._domain_range = domain_range return new_copy def to_basis(self) -> '_fdatabasis.FDataBasis': """Convert the Basis to FDatabasis. Returns: FDataBasis with this basis as its basis, and all basis functions as observations. """ from . import FDataBasis return FDataBasis(self.copy(), np.identity(self.n_basis)) def _list_to_R(self, knots): retstring = "c(" for i in range(0, len(knots)): retstring = retstring + str(knots[i]) + ", " return retstring[0:len(retstring) - 2] + ")" def _to_R(self): raise NotImplementedError def inner_product_matrix(self, other: 'Basis' = None) -> np.array: r"""Return the Inner Product Matrix of a pair of basis. The Inner Product Matrix is defined as .. math:: IP_{ij} = \langle\phi_i, \theta_j\rangle where :math:`\phi_i` is the ith element of the basi and :math:`\theta_j` is the jth element of the second basis. This matrix helps on the calculation of the inner product between objects on two basis and for the change of basis. Args: other: Basis to compute the inner product matrix. If not basis is given, it computes the matrix with itself returning the Gram Matrix Returns: Inner Product Matrix of two basis """ from ...misc import inner_product_matrix if other is None or self == other: return self.gram_matrix() return inner_product_matrix(self, other) def _gram_matrix_numerical(self) -> np.array: """ Compute the Gram matrix numerically. """ from ...misc import inner_product_matrix return inner_product_matrix(self, force_numerical=True) def _gram_matrix(self) -> np.array: """ Compute the Gram matrix. 
Subclasses may override this method for improving computation of the Gram matrix. """ return self._gram_matrix_numerical() def gram_matrix(self) -> np.array: r"""Return the Gram Matrix of a basis The Gram Matrix is defined as .. math:: G_{ij} = \langle\phi_i, \phi_j\rangle where :math:`\phi_i` is the ith element of the basis. This is a symmetric matrix and positive-semidefinite. Returns: Gram Matrix of the basis. """ gram = getattr(self, "_gram_matrix_cached", None) if gram is None: gram = self._gram_matrix() self._gram_matrix_cached = gram return gram def _add_same_basis(self, coefs1, coefs2): return self.copy(), coefs1 + coefs2 def _add_constant(self, coefs, constant): coefs = coefs.copy() constant = np.array(constant) coefs[:, 0] = coefs[:, 0] + constant return self.copy(), coefs def _sub_same_basis(self, coefs1, coefs2): return self.copy(), coefs1 - coefs2 def _sub_constant(self, coefs, other): coefs = coefs.copy() other = np.array(other) coefs[:, 0] = coefs[:, 0] - other return self.copy(), coefs def _mul_constant(self, coefs, other): coefs = coefs.copy() other = np.atleast_2d(other).reshape(-1, 1) coefs = coefs * other return self.copy(), coefs def __repr__(self) -> str: """Representation of a Basis object.""" return (f"{self.__class__.__name__}(domain_range={self.domain_range}, " f"n_basis={self.n_basis})") def __eq__(self, other) -> bool: """Equality of Basis""" return (type(self) == type(other) and _same_domain(self, other) and self.n_basis == other.n_basis) def __hash__(self) -> int: """Hash of Basis""" return hash((self.domain_range, self.n_basis))
30.788732
79
0.602928
"""Module for functional data manipulation in a basis system. Defines functional data object in a basis function system representation and the corresponding basis classes. """ import copy import warnings from abc import ABC, abstractmethod from typing import Tuple import numpy as np from ..._utils import _domain_range, _reshape_eval_points, _same_domain from . import _fdatabasis def _check_domain(domain_range): for domain in domain_range: if len(domain) != 2 or domain[0] >= domain[1]: raise ValueError(f"The interval {domain} is not well-defined.") class Basis(ABC): """Defines the structure of a basis function system. Attributes: domain_range (tuple): a tuple of length 2 containing the initial and end values of the interval over which the basis can be evaluated. n_basis (int): number of functions in the basis. """ def __init__(self, *, domain_range=None, n_basis: int = 1): """Basis constructor. Args: domain_range (tuple or list of tuples, optional): Definition of the interval where the basis defines a space. Defaults to (0,1). n_basis: Number of functions that form the basis. Defaults to 1. """ if domain_range is not None: domain_range = _domain_range(domain_range) # Some checks _check_domain(domain_range) if n_basis < 1: raise ValueError( "The number of basis has to be strictly positive.", ) self._domain_range = domain_range self._n_basis = n_basis super().__init__() def __call__(self, *args, **kwargs) -> np.ndarray: """Evaluate the basis using :meth:`evaluate`.""" return self.evaluate(*args, **kwargs) @property def dim_domain(self) -> int: return 1 @property def dim_codomain(self) -> int: return 1 @property def domain_range(self) -> Tuple[Tuple[float, float], ...]: if self._domain_range is None: return ((0, 1),) * self.dim_domain else: return self._domain_range @property def n_basis(self) -> int: return self._n_basis @abstractmethod def _evaluate(self, eval_points) -> np.ndarray: """Subclasses must override this to provide basis evaluation.""" pass def evaluate(self, eval_points, *, derivative: int = 0) -> np.ndarray: """Evaluate Basis objects and its derivatives. Evaluates the basis function system or its derivatives at a list of given values. Args: eval_points (array_like): List of points where the basis is evaluated. Returns: Matrix whose rows are the values of the each basis function or its derivatives at the values specified in eval_points. """ if derivative < 0: raise ValueError("derivative only takes non-negative values.") elif derivative != 0: warnings.warn("Parameter derivative is deprecated. Use the " "derivative function instead.", DeprecationWarning) return self.derivative(order=derivative)(eval_points) eval_points = _reshape_eval_points(eval_points, aligned=True, n_samples=self.n_basis, dim_domain=self.dim_domain) return self._evaluate(eval_points).reshape( (self.n_basis, len(eval_points), self.dim_codomain)) def __len__(self) -> int: return self.n_basis def derivative(self, *, order: int = 1) -> '_fdatabasis.FDataBasis': """Construct a FDataBasis object containing the derivative. Args: order: Order of the derivative. Defaults to 1. Returns: Derivative object. """ return self.to_basis().derivative(order=order) def _derivative_basis_and_coefs(self, coefs: np.ndarray, order: int = 1): """ Subclasses can override this to provide derivative construction. A basis can provide derivative evaluation at given points without providing a basis representation for its derivatives, although is recommended to provide both if possible. 
""" raise NotImplementedError(f"{type(self)} basis does not support " "the construction of a basis of the " "derivatives.") def plot(self, chart=None, **kwargs): """Plot the basis object or its derivatives. Args: chart (figure object, axe or list of axes, optional): figure over with the graphs are plotted or axis over where the graphs are plotted. **kwargs: keyword arguments to be passed to the fdata.plot function. Returns: fig (figure): figure object in which the graphs are plotted. """ self.to_basis().plot(chart=chart, **kwargs) def _coordinate_nonfull(self, fdatabasis, key): """ Returns a fdatagrid for the coordinate functions indexed by key. Subclasses can override this to provide coordinate indexing. The key parameter has been already validated and is an integer or slice in the range [0, self.dim_codomain. """ raise NotImplementedError("Coordinate indexing not implemented") def _coordinate(self, fdatabasis, key): """Returns a fdatagrid for the coordinate functions indexed by key.""" # Raises error if not in range and normalize key r_key = range(self.dim_codomain)[key] if isinstance(r_key, range) and len(r_key) == 0: raise IndexError("Empty number of coordinates selected") # Full fdatabasis case if (self.dim_codomain == 1 and r_key == 0) or ( isinstance(r_key, range) and len(r_key) == self.dim_codomain): return fdatabasis.copy() else: return self._coordinate_nonfull(fdatabasis=fdatabasis, key=r_key) def rescale(self, domain_range=None): r"""Return a copy of the basis with a new :term:`domain` range, with the corresponding values rescaled to the new bounds. Args: domain_range (tuple, optional): Definition of the interval where the basis defines a space. Defaults uses the same as the original basis. """ return self.copy(domain_range=domain_range) def copy(self, domain_range=None): """Basis copy""" new_copy = copy.deepcopy(self) if domain_range is not None: domain_range = _domain_range(domain_range) # Some checks _check_domain(domain_range) new_copy._domain_range = domain_range return new_copy def to_basis(self) -> '_fdatabasis.FDataBasis': """Convert the Basis to FDatabasis. Returns: FDataBasis with this basis as its basis, and all basis functions as observations. """ from . import FDataBasis return FDataBasis(self.copy(), np.identity(self.n_basis)) def _list_to_R(self, knots): retstring = "c(" for i in range(0, len(knots)): retstring = retstring + str(knots[i]) + ", " return retstring[0:len(retstring) - 2] + ")" def _to_R(self): raise NotImplementedError def inner_product_matrix(self, other: 'Basis' = None) -> np.array: r"""Return the Inner Product Matrix of a pair of basis. The Inner Product Matrix is defined as .. math:: IP_{ij} = \langle\phi_i, \theta_j\rangle where :math:`\phi_i` is the ith element of the basi and :math:`\theta_j` is the jth element of the second basis. This matrix helps on the calculation of the inner product between objects on two basis and for the change of basis. Args: other: Basis to compute the inner product matrix. If not basis is given, it computes the matrix with itself returning the Gram Matrix Returns: Inner Product Matrix of two basis """ from ...misc import inner_product_matrix if other is None or self == other: return self.gram_matrix() return inner_product_matrix(self, other) def _gram_matrix_numerical(self) -> np.array: """ Compute the Gram matrix numerically. """ from ...misc import inner_product_matrix return inner_product_matrix(self, force_numerical=True) def _gram_matrix(self) -> np.array: """ Compute the Gram matrix. 
Subclasses may override this method for improving computation of the Gram matrix. """ return self._gram_matrix_numerical() def gram_matrix(self) -> np.array: r"""Return the Gram Matrix of a basis The Gram Matrix is defined as .. math:: G_{ij} = \langle\phi_i, \phi_j\rangle where :math:`\phi_i` is the ith element of the basis. This is a symmetric matrix and positive-semidefinite. Returns: Gram Matrix of the basis. """ gram = getattr(self, "_gram_matrix_cached", None) if gram is None: gram = self._gram_matrix() self._gram_matrix_cached = gram return gram def _add_same_basis(self, coefs1, coefs2): return self.copy(), coefs1 + coefs2 def _add_constant(self, coefs, constant): coefs = coefs.copy() constant = np.array(constant) coefs[:, 0] = coefs[:, 0] + constant return self.copy(), coefs def _sub_same_basis(self, coefs1, coefs2): return self.copy(), coefs1 - coefs2 def _sub_constant(self, coefs, other): coefs = coefs.copy() other = np.array(other) coefs[:, 0] = coefs[:, 0] - other return self.copy(), coefs def _mul_constant(self, coefs, other): coefs = coefs.copy() other = np.atleast_2d(other).reshape(-1, 1) coefs = coefs * other return self.copy(), coefs def __repr__(self) -> str: """Representation of a Basis object.""" return (f"{self.__class__.__name__}(domain_range={self.domain_range}, " f"n_basis={self.n_basis})") def __eq__(self, other) -> bool: """Equality of Basis""" return (type(self) == type(other) and _same_domain(self, other) and self.n_basis == other.n_basis) def __hash__(self) -> int: """Hash of Basis""" return hash((self.domain_range, self.n_basis))
0
0
833626e74d4e5013fbedd077febd8ce8f93d00fe
2,613
py
Python
02-lm-tensorflow/loglin-lm.py
tinySean/nn4nlp-tensorflow
17d64427ad3cf276f2d43eac706d14a6145cc3e6
[ "Apache-2.0" ]
2
2019-03-04T10:53:23.000Z
2020-09-25T02:31:44.000Z
02-lm-tensorflow/loglin-lm.py
tinySean/nn4nlp-tensorflow
17d64427ad3cf276f2d43eac706d14a6145cc3e6
[ "Apache-2.0" ]
null
null
null
02-lm-tensorflow/loglin-lm.py
tinySean/nn4nlp-tensorflow
17d64427ad3cf276f2d43eac706d14a6145cc3e6
[ "Apache-2.0" ]
1
2020-09-22T10:33:02.000Z
2020-09-22T10:33:02.000Z
from collections import defaultdict
import math
import time
import random

import tensorflow as tf
import numpy as np

# The length of the n-gram
N = 2

# Functions to read in the corpus
# NOTE: We are using data from the Penn Treebank, which is already converted
# into an easy-to-use format with "<unk>" symbols. If we were using other
# data we would have to do pre-processing and consider how to choose
# unknown words, etc.
w2i = defaultdict(lambda: len(w2i))
S = w2i["<s>"]
UNK = w2i["<unk>"]


def read_dataset(filename):
    with open(filename, "r") as f:
        for line in f:
            yield [w2i[x] for x in line.strip().split(" ")]


# Read in the data
train = list(read_dataset("../data/ptb/train.txt"))
w2i = defaultdict(lambda: UNK, w2i)
dev = list(read_dataset("../data/ptb/valid.txt"))
i2w = {v: k for k, v in w2i.items()}
nwords = len(w2i)

# Placeholders for the two context words and the one-hot target distribution.
# The target must be float, since softmax_cross_entropy_with_logits expects
# labels with the same (floating-point) dtype as the logits.
x1 = tf.placeholder(shape=(1,), dtype=tf.int32)
x2 = tf.placeholder(shape=(1,), dtype=tf.int32)
y = tf.placeholder(shape=(1, None), dtype=tf.float32)

embedding1 = tf.get_variable(name="embedding1", shape=(nwords, nwords), initializer=tf.glorot_normal_initializer())
embedding2 = tf.get_variable(name="embedding2", shape=(nwords, nwords), initializer=tf.glorot_normal_initializer())
bias = tf.get_variable(name="bias", shape=(nwords,), initializer=tf.glorot_normal_initializer())

# Log-linear score: sum of the two context-word feature vectors plus a bias
embed1 = tf.nn.embedding_lookup(embedding1, x1)
embed2 = tf.nn.embedding_lookup(embedding2, x2)
score = embed1 + embed2 + bias

loss = tf.nn.softmax_cross_entropy_with_logits(logits=score, labels=y)
optimizer = tf.train.AdamOptimizer().minimize(loss)

session = tf.Session()
session.run(tf.global_variables_initializer())

for ITER in range(10):
    random.shuffle(train)
    total_loss = 0
    train_words = 0
    for id, sentence in enumerate(train):
        history = [S] * N
        sentence_loss = 0
        for i in sentence + [S]:
            y_one_hot = np.zeros(shape=(1, nwords))
            y_one_hot[0][i] = 1
            input1, input2 = history
            # Slide the n-gram window: drop the oldest word, append the
            # current word so it becomes context for the next prediction.
            history = history[1:] + [i]
            feed_train = {x1: [input1], x2: [input2], y: y_one_hot}
            char_loss, _ = session.run(fetches=[loss, optimizer], feed_dict=feed_train)
            sentence_loss += char_loss
        total_loss += sentence_loss
        train_words += len(sentence)
        if (id + 1) % 5000 == 0:
            print("--finished %r sentences, %.4f" % (id + 1, (total_loss / train_words)))
    print("iter %r: train loss/word=%.4f, ppl=%.4f" % (
        ITER, total_loss / train_words, math.exp(total_loss / train_words)))
35.310811
115
0.654038
0
0
d92e03bffe94661a767cf2e5a8765b439f90506e
340
py
Python
hgapp/powers/migrations/0015_remove_base_power_example_powers.py
shadytradesman/The-Contract-Website
d8b353064f91c53ebab951dec784a0a36caba260
[ "Apache-2.0" ]
6
2020-10-03T12:15:05.000Z
2021-10-15T04:43:36.000Z
hgapp/powers/migrations/0015_remove_base_power_example_powers.py
shadytradesman/The-Contract-Website
d8b353064f91c53ebab951dec784a0a36caba260
[ "Apache-2.0" ]
99
2020-06-04T17:43:56.000Z
2022-03-12T01:07:20.000Z
hgapp/powers/migrations/0015_remove_base_power_example_powers.py
shadytradesman/The-Contract-Website
d8b353064f91c53ebab951dec784a0a36caba260
[ "Apache-2.0" ]
9
2020-06-06T16:39:09.000Z
2020-10-02T16:24:17.000Z
# Generated by Django 2.2.12 on 2020-08-02 14:03

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('powers', '0014_auto_20200731_1402'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='base_power',
            name='example_powers',
        ),
    ]
18.888889
48
0.605882
0
0
7465a346a19ecfbc1286a25f61bbc8e6e0865c9f
201
py
Python
tree.py
juhyun0/python_turtle2
59943c03a07a71aa33ab7124bca56f6b880b6883
[ "Unlicense" ]
null
null
null
tree.py
juhyun0/python_turtle2
59943c03a07a71aa33ab7124bca56f6b880b6883
[ "Unlicense" ]
null
null
null
tree.py
juhyun0/python_turtle2
59943c03a07a71aa33ab7124bca56f6b880b6883
[ "Unlicense" ]
null
null
null
import turtle

# The original referenced a global turtle `t` without creating it;
# it needs a module-level Turtle instance to run.
t = turtle.Turtle()


def tree(length):
    # Recursively draw a branch, then two shorter sub-branches.
    if length > 5:
        t.forward(length)
        t.right(20)
        tree(length - 15)
        t.left(40)
        tree(length - 15)
        t.right(20)
        t.backward(length)


t.left(90)
t.color("green")
t.speed(1)
tree(90)
11.823529
20
0.641791
0
0
5f574b8c03d61700a6552dfc9ad0569e5c66bcbe
3,648
py
Python
model/genetic_model.py
abduskhazi/PL-Binding-Affinity-Prediction-using-ML
fe7172570fa378480455b4dcd214d0b0c4e94ff0
[ "MIT" ]
1
2021-12-07T09:00:01.000Z
2021-12-07T09:00:01.000Z
model/genetic_model.py
abduskhazi/PL-Binding-Affinity-Prediction-using-ML
fe7172570fa378480455b4dcd214d0b0c4e94ff0
[ "MIT" ]
null
null
null
model/genetic_model.py
abduskhazi/PL-Binding-Affinity-Prediction-using-ML
fe7172570fa378480455b4dcd214d0b0c4e94ff0
[ "MIT" ]
null
null
null
# genetic algorithm search of the one max optimization problem
from numpy.random import randint
from numpy.random import rand
import numpy as np
import json


# objective function
def onemax(x):
    return -sum(x)


def sorted_population(pop, scores):
    indices = scores.argsort()
    sorted_pop = []
    for i in indices:
        sorted_pop += [pop[i]]
    return sorted_pop


# tournament selection
def selection(pop, scores, k=10):
    # first random selection
    selection_ix = randint(len(pop))
    for ix in randint(0, len(pop), k - 1):
        # check if better (e.g. perform a tournament)
        if scores[ix] < scores[selection_ix]:
            selection_ix = ix
    return pop[selection_ix]


# crossover two parents to create two children
def crossover(p1, p2, r_cross):
    # children are copies of parents by default
    c1, c2 = p1.copy(), p2.copy()
    # check for recombination
    if rand() < r_cross:
        # select crossover point that is not on the end of the string
        pt = randint(1, len(p1) - 2)
        # perform crossover
        c1 = p1[:pt] + p2[pt:]
        c2 = p2[:pt] + p1[pt:]
    return [c1, c2]


# mutation operator
def mutation(bitstring, r_mut):
    for i in range(len(bitstring)):
        # check for a mutation
        if rand() < r_mut:
            # flip the bit
            bitstring[i] = 1 - bitstring[i]


# genetic algorithm
def genetic_algorithm(objective, X, y, n_bits, n_iter, n_pop, r_cross, r_mut, name="genetic"):
    # initial population of random bitstring
    pop = [randint(0, 2, n_bits).tolist() for _ in range(n_pop)]
    # keep track of best solution
    best, best_eval = pop[0], objective([pop[0]], X, y)[0]
    with open(name + "_feature_selection.json", 'w') as f:
        json.dump((best_eval, best), f)
    # enumerate generations
    for gen in range(n_iter):
        print("Generation - ", gen)
        # evaluate all candidates in the population
        scores = objective(pop, X, y)
        # check for new best solution
        for i in range(n_pop):
            if scores[i] < best_eval:
                best, best_eval = pop[i], scores[i]
                # print(">%d, new best f(%s) = %.3f" % (gen, pop[i], scores[i]))
                print(">%d, new best = %.3f." % (gen, scores[i]))
                with open(name + "_feature_selection.json", 'w') as f:
                    json.dump((scores[i], pop[i]), f)
        # select parents
        selected = [selection(pop, scores) for _ in range(n_pop - 50)]
        # Select the elite among the population
        selected += sorted_population(pop, np.array(scores))[:50]
        # create the next generation
        children = list()
        for i in range(0, n_pop, 2):
            # get selected parents in pairs
            p1, p2 = selected[i], selected[i + 1]
            # crossover and mutation
            for c in crossover(p1, p2, r_cross):
                # mutation
                mutation(c, r_mut)
                # store for next generation
                children.append(c)
        # replace population
        pop = children
        with open(name + "_generation_info.data", "w") as f:
            json.dump(["Generation = " + str(gen), pop], f)
    return [best, best_eval]


if False:
    # define the total iterations
    n_iter = 100
    # bits
    n_bits = 500  # 20
    # define the population size
    n_pop = n_bits * 5  # 100
    # crossover rate
    r_cross = 0.9
    # mutation rate
    r_mut = 1.0 / float(n_bits)
    # perform the genetic algorithm search
    best, score = genetic_algorithm(onemax, n_bits, n_iter, n_pop, r_cross, r_mut)
    print('Done!')
    print('f(%s) = %f' % (best, score))
34.415094
96
0.590186
0
0
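A minimal sketch of how the genetic_algorithm entry point in the record above could be driven for feature selection. The objective, X, and y below are illustrative stand-ins (the real project presumably scores feature masks against model performance); the sketch only follows the objective(pop, X, y) -> scores contract the function assumes.

import numpy as np
from numpy.random import rand

def mask_objective(pop, X, y):
    # Toy stand-in: lower score is better, so reward masks that keep few
    # features; real code would fit and score a model on X[:, mask] vs y.
    return [sum(bits) for bits in pop]

X = rand(100, 50)   # hypothetical feature matrix
y = rand(100)       # hypothetical targets
best, best_eval = genetic_algorithm(
    mask_objective, X, y,
    n_bits=50, n_iter=5, n_pop=60, r_cross=0.9, r_mut=1.0 / 50,
    name="demo")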
96164478cbee8505379a42f6487489b2e0b29439
7,588
py
Python
dockerizing-django/web/joblistings/forms.py
MattYu/ConcordiaAce
35eff7614652eb548e532dcf00e3a7296855285c
[ "MIT" ]
1
2021-06-14T06:54:16.000Z
2021-06-14T06:54:16.000Z
joblistings/forms.py
MattYu/ConcordiaAce
35eff7614652eb548e532dcf00e3a7296855285c
[ "MIT" ]
34
2020-04-05T01:14:31.000Z
2022-03-12T00:23:02.000Z
joblistings/forms.py
MattYu/ConcordiaAce
35eff7614652eb548e532dcf00e3a7296855285c
[ "MIT" ]
null
null
null
from django import forms
from joblistings.models import Job
from accounts.models import Employer
from ace.constants import CATEGORY_CHOICES, MAX_LENGTH_TITLE, MAX_LENGTH_DESCRIPTION, MAX_LENGTH_RESPONSABILITIES, MAX_LENGTH_REQUIREMENTS, MAX_LENGTH_STANDARDFIELDS, LOCATION_CHOICES
from tinymce.widgets import TinyMCE
from companies.models import Company
from joblistings.models import Job, JobPDFDescription
from django.shortcuts import get_object_or_404
from accounts.models import Employer


class JobForm(forms.Form):
    title = forms.CharField(
        max_length=MAX_LENGTH_TITLE,
        widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Your job title here'})
    )
    category = forms.ChoiceField(
        choices=CATEGORY_CHOICES,
        widget=forms.Select(attrs={'class': 'form-control', 'placeholder': 'Select Category'})
    )
    salaryRange = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Salary range'})
    )
    vacancy = forms.IntegerField(
        required=False,
        widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Vacancy'})
    )
    expirationDate = forms.CharField(
        widget=forms.TextInput(attrs={'class': 'form-control', 'type': 'date'})
    )
    startDate = forms.CharField(
        widget=forms.TextInput(attrs={'class': 'form-control', 'type': 'date'})
    )
    duration = forms.CharField(
        max_length=20,
        widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Total duration in months'})
    )
    description = forms.CharField(
        max_length=MAX_LENGTH_DESCRIPTION,
        widget=TinyMCE(attrs={'class': 'tinymce-editor tinymce-editor-1'})
    )
    responsabilities = forms.CharField(
        max_length=MAX_LENGTH_RESPONSABILITIES,
        widget=TinyMCE(attrs={'class': 'tinymce-editor tinymce-editor-2'})
    )
    requirements = forms.CharField(
        max_length=MAX_LENGTH_REQUIREMENTS,
        widget=TinyMCE(attrs={'class': 'tinymce-editor tinymce-editor-2'})
    )
    country = forms.ChoiceField(
        choices=LOCATION_CHOICES,
        widget=forms.Select(attrs={'class': 'form-control', 'placeholder': 'Select Country'})
    )
    location = forms.CharField(
        max_length=20,
        widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'City'})
    )
    postcode = forms.CharField(
        max_length=20,
        widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Postal Code'})
    )
    yourLocation = forms.CharField(
        max_length=20,
        widget=forms.TextInput(attrs={'class': 'form-control', 'placeholder': 'Your location'})
    )
    company = forms.ChoiceField(
        widget=forms.Select(attrs={'class': 'form-control', 'placeholder': 'Select Category'})
    )
    descriptionFile = forms.FileField(required=False)

    class Meta:
        model = Job
        exclude = ('company',)

    def __init__(self, *args, **kwargs):
        user = kwargs.pop('user', None)
        super().__init__(*args, **kwargs)
        if user.user_type == 4:
            company = Company.objects.all()
        else:
            company = [Employer.objects.get(user=user).company]
        company_choices = []
        for obj in company:
            company_choices.append((obj.pk, obj))
        self.fields['company'].choices = company_choices

    def clean(self):
        cleaned_data = super().clean()
        title = cleaned_data.get('title')
        category = cleaned_data.get('category')
        salaryRange = cleaned_data.get('salaryRange')
        vacancy = cleaned_data.get('vacancy')
        expirationDate = cleaned_data.get('expirationDate')
        startDate = cleaned_data.get('startDate')
        duration = cleaned_data.get('duration')
        description = cleaned_data.get('description')
        responsabilities = cleaned_data.get('responsabilities')
        requirements = cleaned_data.get('requirements')
        country = cleaned_data.get('country')
        location = cleaned_data.get('location')
        postcode = cleaned_data.get('postcode')
        yourLocation = cleaned_data.get('yourLocation')
        company = cleaned_data.get('company')
        self.cleaned_data = cleaned_data
        if not title and not location and not salaryRange and not description and not location and not postcode:
            raise forms.ValidationError('You have to write something')
        '''
        name = cleaned_data.get('name')
        email = cleaned_data.get('email')
        message = cleaned_data.get('message')
        if not name and not email and not message:
            raise forms.ValidationError('You have to write something!')
        '''

    def save(self):
        job = Job()
        cleaned_data = self.cleaned_data
        job.title = cleaned_data.get('title')
        job.category = cleaned_data.get('category')
        job.salaryRange = cleaned_data.get('salaryRange')
        job.vacancy = cleaned_data.get('vacancy')
        job.expirationDate = cleaned_data.get('expirationDate')
        job.startDate = cleaned_data.get('startDate')
        job.duration = cleaned_data.get('duration')
        job.description = cleaned_data.get('description')
        job.responsabilities = cleaned_data.get('responsabilities')
        job.requirements = cleaned_data.get('requirements')
        job.country = cleaned_data.get('country')
        job.location = cleaned_data.get('location')
        job.postcode = cleaned_data.get('postcode')
        job.yourLocation = cleaned_data.get('yourLocation')
        job.company = get_object_or_404(Company, pk=cleaned_data.get('company'))
        job.save()
        if cleaned_data.get('descriptionFile'):
            jobPDFDescription = JobPDFDescription()
            jobPDFDescription.job = job
            jobPDFDescription.descriptionFile = cleaned_data.get('descriptionFile')
            jobPDFDescription.save()
        return job


class AdminAddRemoveJobPermission(forms.Form):
    addEmployer = forms.ChoiceField(
        required=False,
        widget=forms.Select(attrs={'class': 'form-control', 'placeholder': 'Select Category'})
    )
    removeEmployer = forms.ChoiceField(
        required=False,
        widget=forms.Select(attrs={'class': 'form-control', 'placeholder': 'Select Category'})
    )

    def __init__(self, *args, **kwargs):
        jobId = kwargs.pop('jobId', None)
        super().__init__(*args, **kwargs)
        if jobId:
            currentPermission = []
            job = Job.objects.filter(pk=jobId).all()[0]
            employerSet = set()
            for employer in job.jobAccessPermission.all():
                currentPermission.append((employer.pk, employer.user.email))
                employerSet.add(employer)
            employerOfSameCompanyWithoutPermission = Employer.objects.filter(company=job.company).all()
            sameCompany = []
            for employer in employerOfSameCompanyWithoutPermission.all():
                if employer not in employerSet:
                    sameCompany.append((employer.pk, employer.user.email))
            currentPermission.sort(key=lambda x: x[1])
            sameCompany.sort(key=lambda x: x[1])
            currentPermission.insert(0, ("Remove Permission", "Revoke Permission"))
            sameCompany.insert(0, ("Add Permission", "Add Permission from " + job.company.name))
            self.fields['addEmployer'].choices = sameCompany
            self.fields['removeEmployer'].choices = currentPermission
38.714286
183
0.649974
0
0
766d0dea3bfd78b0a9fc1313787741bb68697e98
323
py
Python
cogdl/wrappers/data_wrapper/node_classification/__init__.py
li-ziang/cogdl
60022d3334e3abae2d2a505e6e049a26acf10f39
[ "MIT" ]
6
2020-07-09T02:48:41.000Z
2021-06-16T09:04:14.000Z
cogdl/wrappers/data_wrapper/node_classification/__init__.py
li-ziang/cogdl
60022d3334e3abae2d2a505e6e049a26acf10f39
[ "MIT" ]
null
null
null
cogdl/wrappers/data_wrapper/node_classification/__init__.py
li-ziang/cogdl
60022d3334e3abae2d2a505e6e049a26acf10f39
[ "MIT" ]
1
2020-05-19T11:45:45.000Z
2020-05-19T11:45:45.000Z
from .cluster_dw import ClusterWrapper
from .graphsage_dw import GraphSAGEDataWrapper
from .m3s_dw import M3SDataWrapper
from .network_embedding_dw import NetworkEmbeddingDataWrapper
from .node_classification_dw import FullBatchNodeClfDataWrapper
from .pprgo_dw import PPRGoDataWrapper
from .sagn_dw import SAGNDataWrapper
40.375
63
0.891641
0
0
dc39a84fe404c1eef75b6fc371c87b856fc55a84
500
py
Python
run.py
nefeli/trafficgen
81b6cb01d8e9d0abfcd83df641210035e265f13f
[ "BSD-3-Clause" ]
10
2017-04-26T07:01:48.000Z
2020-07-25T00:29:45.000Z
run.py
nefeli/trafficgen
81b6cb01d8e9d0abfcd83df641210035e265f13f
[ "BSD-3-Clause" ]
12
2017-03-21T17:58:16.000Z
2017-10-16T18:01:37.000Z
run.py
nefeli/trafficgen
81b6cb01d8e9d0abfcd83df641210035e265f13f
[ "BSD-3-Clause" ]
5
2017-03-09T19:59:26.000Z
2018-04-02T19:49:57.000Z
#!/usr/bin/env python3

import io
import sys

import generator
from generator.cmdline import *

if __name__ == '__main__':
    if len(sys.argv) == 1:
        run_cli()
    else:
        cmds = []
        line_buf = []
        for arg in sys.argv[1:]:
            if arg == '--':
                cmds.append(' '.join(line_buf))
                line_buf = []
            else:
                line_buf.append(arg)
        cmds.append(' '.join(line_buf))
        run_cmds(io.StringIO('\n'.join(cmds)))
20.833333
47
0.498
0
0
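The '--' splitting convention in run.py above is easy to miss: arguments between '--' separators are joined into one command line each. A hedged illustration with made-up commands ('set rate 10' and 'start port0' are placeholders, not real trafficgen commands):

argv = ['set', 'rate', '10', '--', 'start', 'port0']  # hypothetical argv tail
cmds, line_buf = [], []
for arg in argv:
    if arg == '--':
        cmds.append(' '.join(line_buf))
        line_buf = []
    else:
        line_buf.append(arg)
cmds.append(' '.join(line_buf))
assert cmds == ['set rate 10', 'start port0']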
2d0afb7f18f7dfcc8cf1e3ca1087c009e3e728f5
974
py
Python
scripts/genotype_from_fpaths.py
JIC-Image-Analysis/fishtools
9d7cfa695711ec4b40986be65e11eea7ad1b0b5d
[ "MIT" ]
null
null
null
scripts/genotype_from_fpaths.py
JIC-Image-Analysis/fishtools
9d7cfa695711ec4b40986be65e11eea7ad1b0b5d
[ "MIT" ]
null
null
null
scripts/genotype_from_fpaths.py
JIC-Image-Analysis/fishtools
9d7cfa695711ec4b40986be65e11eea7ad1b0b5d
[ "MIT" ]
1
2022-03-10T13:08:21.000Z
2022-03-10T13:08:21.000Z
import os
import pathlib

import click
import parse

from fishtools.config import Config


def is_image(filename, image_exts=['.czi']):
    _, ext = os.path.splitext(filename)
    return ext in image_exts


@click.command()
@click.argument('config_fpath')
def main(config_fpath):
    config = Config(config_fpath)
    dirpath = pathlib.Path(config.images_root_dirpath)

    dirpaths_fns = []
    for dirpath, dirnames, filenames in os.walk(dirpath):
        for fn in filenames:
            if is_image(fn):
                dirpaths_fns.append((dirpath, fn))

    expid_to_genotype = {}
    image_name_template = "Experiment-{expid:d}.czi"
    for dirpath, fn in dirpaths_fns:
        result = parse.parse(image_name_template, fn)
        expid = result.named['expid']
        expid_to_genotype[expid] = os.path.basename(dirpath)

    for expid, genotype in expid_to_genotype.items():
        print(f"{expid}\t{genotype}")


if __name__ == "__main__":
    main()
20.723404
60
0.666324
0
0
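The genotype script above leans on the parse library's typed templates to pull experiment ids out of filenames. A quick self-contained check (the filename is a made-up example):

import parse

result = parse.parse("Experiment-{expid:d}.czi", "Experiment-42.czi")
assert result.named['expid'] == 42  # ':d' converts the captured text to int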
052f73c51d8e906ef7280490bdcc2bd79bb64740
4,643
py
Python
list_id_bimap.py
martincochran/score-minion
58197798a0a3a4fbcd54ffa0a2fab2e865985bfd
[ "Apache-2.0" ]
null
null
null
list_id_bimap.py
martincochran/score-minion
58197798a0a3a4fbcd54ffa0a2fab2e865985bfd
[ "Apache-2.0" ]
3
2015-02-15T18:31:10.000Z
2015-02-22T19:56:05.000Z
list_id_bimap.py
martincochran/score-minion
58197798a0a3a4fbcd54ffa0a2fab2e865985bfd
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python
#
# Copyright 2015 Martin Cochran
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from game_model import Game
from scores_messages import AgeBracket
from scores_messages import Division
from scores_messages import League


class ListIdBiMap:
    """Encapsulates mappings to and from list id and structured properties."""

    # List ID definitions corresponding to lists defined in the twitter
    # account of @martin_cochran.
    USAU_COLLEGE_OPEN_LIST_ID = '186814318'
    USAU_COLLEGE_WOMENS_LIST_ID = '186814882'
    USAU_CLUB_OPEN_LIST_ID = '186732484'
    USAU_CLUB_WOMENS_LIST_ID = '186732631'
    USAU_CLUB_MIXED_LIST_ID = '186815046'
    AUDL_LIST_ID = '186926608'
    MLU_LIST_ID = '186926651'

    ALL_LISTS = [
        USAU_COLLEGE_OPEN_LIST_ID,
        USAU_COLLEGE_WOMENS_LIST_ID,
        USAU_CLUB_OPEN_LIST_ID,
        USAU_CLUB_WOMENS_LIST_ID,
        USAU_CLUB_MIXED_LIST_ID,
        AUDL_LIST_ID,
        MLU_LIST_ID,
    ]

    # Simple data structure to look up lists if the league, division, and age
    # bracket were specified in the request.
    LIST_ID_MAP = {
        League.USAU: {
            Division.OPEN: {
                AgeBracket.COLLEGE: USAU_COLLEGE_OPEN_LIST_ID,
                AgeBracket.NO_RESTRICTION: USAU_CLUB_OPEN_LIST_ID,
            },
            Division.WOMENS: {
                AgeBracket.COLLEGE: USAU_COLLEGE_WOMENS_LIST_ID,
                AgeBracket.NO_RESTRICTION: USAU_CLUB_WOMENS_LIST_ID,
            },
            Division.MIXED: {
                AgeBracket.NO_RESTRICTION: USAU_CLUB_MIXED_LIST_ID,
            },
        },
        League.AUDL: {
            Division.OPEN: {
                AgeBracket.NO_RESTRICTION: AUDL_LIST_ID,
            },
        },
        League.MLU: {
            Division.OPEN: {
                AgeBracket.NO_RESTRICTION: MLU_LIST_ID,
            },
        },
    }

    LIST_ID_TO_DIVISION = {
        USAU_COLLEGE_OPEN_LIST_ID: Division.OPEN,
        USAU_COLLEGE_WOMENS_LIST_ID: Division.WOMENS,
        USAU_CLUB_OPEN_LIST_ID: Division.OPEN,
        USAU_CLUB_WOMENS_LIST_ID: Division.WOMENS,
        USAU_CLUB_MIXED_LIST_ID: Division.MIXED,
        AUDL_LIST_ID: Division.OPEN,
        MLU_LIST_ID: Division.OPEN,
    }

    LIST_ID_TO_AGE_BRACKET = {
        USAU_COLLEGE_OPEN_LIST_ID: AgeBracket.COLLEGE,
        USAU_COLLEGE_WOMENS_LIST_ID: AgeBracket.COLLEGE,
        USAU_CLUB_OPEN_LIST_ID: AgeBracket.NO_RESTRICTION,
        USAU_CLUB_WOMENS_LIST_ID: AgeBracket.NO_RESTRICTION,
        USAU_CLUB_MIXED_LIST_ID: AgeBracket.NO_RESTRICTION,
        AUDL_LIST_ID: AgeBracket.NO_RESTRICTION,
        MLU_LIST_ID: AgeBracket.NO_RESTRICTION,
    }

    LIST_ID_TO_LEAGUE = {
        USAU_COLLEGE_OPEN_LIST_ID: League.USAU,
        USAU_COLLEGE_WOMENS_LIST_ID: League.USAU,
        USAU_CLUB_OPEN_LIST_ID: League.USAU,
        USAU_CLUB_WOMENS_LIST_ID: League.USAU,
        USAU_CLUB_MIXED_LIST_ID: League.USAU,
        AUDL_LIST_ID: League.AUDL,
        MLU_LIST_ID: League.MLU,
    }

    @staticmethod
    def GetListId(division, age_bracket, league):
        """Looks up the list_id which corresponds to the given division and league.

        Args:
            division: Division of interest
            age_bracket: AgeBracket of interest
            league: League of interest
        Returns:
            The list id corresponding to that league and division, or '' if
            no such list exists.
        """
        d = ListIdBiMap.LIST_ID_MAP.get(league, {})
        if not d:
            return ''
        d = d.get(division, {})
        if not d:
            return ''
        return d.get(age_bracket, '')

    @staticmethod
    def GetStructuredPropertiesForList(list_id):
        """Returns the division, age_bracket, and league for the given list id.

        Defaults to Division.OPEN, AgeBracket.NO_RESTRICTION, and League.USAU,
        if the division, age_bracket, or league, respectively, does not exist
        in the map for the given list_id.

        Args:
            list_id: ID of list for which to retrieve properties.
        Returns:
            (division, age_bracket, league) tuple for the given list ID.
        """
        division = ListIdBiMap.LIST_ID_TO_DIVISION.get(list_id, Division.OPEN)
        age_bracket = ListIdBiMap.LIST_ID_TO_AGE_BRACKET.get(
            list_id, AgeBracket.NO_RESTRICTION)
        league = ListIdBiMap.LIST_ID_TO_LEAGUE.get(list_id, League.USAU)
        return (division, age_bracket, league)
31.80137
92
0.713117
0
0
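A hypothetical round trip through the two lookups in ListIdBiMap above, assuming the scores_messages enums behave as the file suggests:

list_id = ListIdBiMap.GetListId(
    Division.MIXED, AgeBracket.NO_RESTRICTION, League.USAU)
assert list_id == ListIdBiMap.USAU_CLUB_MIXED_LIST_ID
# The reverse maps recover the same structured properties.
assert ListIdBiMap.GetStructuredPropertiesForList(list_id) == (
    Division.MIXED, AgeBracket.NO_RESTRICTION, League.USAU)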
468ad4ffeae4e5171b0014bec49676ed9cc8da05
2,988
py
Python
loralos/wms_image.py
SimonLarsen/loralos
198a6b94a984f12e7a069826e3f977db9de34d00
[ "MIT" ]
null
null
null
loralos/wms_image.py
SimonLarsen/loralos
198a6b94a984f12e7a069826e3f977db9de34d00
[ "MIT" ]
null
null
null
loralos/wms_image.py
SimonLarsen/loralos
198a6b94a984f12e7a069826e3f977db9de34d00
[ "MIT" ]
null
null
null
from owslib.wms import WebMapService
import pyproj
from PIL import Image
from typing import Tuple, List, Dict, Any
import os.path
from pathlib import Path

FORMAT_ENDINGS = {"image/jpeg": "jpg"}


class WMSImage:
    def __init__(
        self,
        url: str,
        layer: str,
        cache_dir: str,
        style: str = "default",
        tile_size: int = 1000,
        resolution: int = 500,
        format: str = "image/jpeg",
        crs: str = "epsg:25832",
        headers: Dict[str, Any] = None,
    ) -> None:
        self.url = url
        self.layer = layer
        self.cache_dir = cache_dir
        self.style = style
        self.tile_size = tile_size
        self.resolution = resolution
        self.format = format
        self.crs = crs
        self.headers = headers

        self.wms = WebMapService(self.url, headers=self.headers)
        self.trans = pyproj.Transformer.from_crs(
            "wgs84", self.crs, always_xy=True
        )
        self.cached_image = None

    def load_tile(self, x: float, y: float) -> None:
        tx = int(x // self.tile_size * self.tile_size)
        ty = int(y // self.tile_size * self.tile_size)
        bbox = (tx, ty, tx + self.tile_size, ty + self.tile_size)

        cache_file = Path(self.cache_dir) / (
            f"wms_{self.layer}_{bbox[0]}_{bbox[1]}_{bbox[2]}_{bbox[3]}"
            f"_{self.tile_size}_{self.resolution}"
            f".{FORMAT_ENDINGS[self.format]}"
        )
        if not os.path.exists(cache_file):
            res = self.wms.getmap(
                layers=[self.layer],
                styles=[self.style],
                srs=self.crs,
                bbox=bbox,
                size=(self.resolution, self.resolution),
                format=self.format,
            )
            with open(cache_file, "wb") as fp:
                fp.write(res.read())

        image = Image.open(cache_file)
        self.cached_image = image.load()

    def get_pixels(
        self, lons: List[float], lats: List[float]
    ) -> List[Tuple[float, float, float]]:
        points = [None] * len(lons)
        tiles = [None] * len(lons)
        for i in range(len(lons)):
            x, y = self.trans.transform(lons[i], lats[i])
            points[i] = (x, y)
            tx = int(x // self.tile_size * self.tile_size)
            ty = int(y // self.tile_size * self.tile_size)
            tiles[i] = (tx, ty)

        order = list(range(len(lons)))
        order.sort(key=lambda i: tiles[i])

        prev_tile = None
        out = [None] * len(lons)
        for i in order:
            tile = tiles[i]
            if tile != prev_tile:
                self.load_tile(*tile)
                prev_tile = tile
            x, y = points[i]
            px = round((x - tile[0]) / self.tile_size * (self.resolution - 1))
            py = round(
                (1.0 - (y - tile[1]) / self.tile_size) * (self.resolution - 1)
            )
            out[i] = self.cached_image[px, py]
        return out
30.489796
78
0.522758
0
0
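A sketch of how the WMSImage class above might be used. The endpoint URL and layer name are placeholders, not taken from the file (any WMS endpoint serving the configured CRS would do), and get_pixels fetches tiles over the network on a cache miss:

img = WMSImage(
    url="https://example.org/wms",   # placeholder endpoint
    layer="orthophoto",              # placeholder layer name
    cache_dir="/tmp/wms_cache",
)
pixels = img.get_pixels(lons=[9.0, 9.001], lats=[56.0, 56.001])
print(pixels)  # pixel values sampled from the cached tiles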
1acaf21ff5c98fb66692384b82ca684a87d4e348
5,004
py
Python
mail/gmailapi.py
prabin-acharya/mail-Gmail
b39bfbd48fedcd3e2a101cd0d2d4c3302faa233d
[ "MIT" ]
1
2021-08-08T04:02:32.000Z
2021-08-08T04:02:32.000Z
mail/gmailapi.py
prabin-acharya/mail-Gmail
b39bfbd48fedcd3e2a101cd0d2d4c3302faa233d
[ "MIT" ]
null
null
null
mail/gmailapi.py
prabin-acharya/mail-Gmail
b39bfbd48fedcd3e2a101cd0d2d4c3302faa233d
[ "MIT" ]
null
null
null
from __future__ import print_function

import os.path

from googleapiclient.discovery import build
from googleapiclient import errors
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials

import time
from email.mime.text import MIMEText
from .models import Email
import base64
import email
import json
import datetime
import pytz
import re

# If modifying these scopes, delete the file token.json.
SCOPES = ['https://www.googleapis.com/auth/gmail.modify']

creds = None
# The file token.json stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
if os.path.exists('token.json'):
    creds = Credentials.from_authorized_user_file('token.json', SCOPES)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
    if creds and creds.expired and creds.refresh_token:
        creds.refresh(Request())
    else:
        flow = InstalledAppFlow.from_client_secrets_file(
            'credentials.json', SCOPES)
        creds = flow.run_local_server(port=0)
    # Save the credentials for the next run
    with open('token.json', 'w') as token:
        token.write(creds.to_json())

service = build('gmail', 'v1', credentials=creds)


def data_encoder(text):
    message = None
    if len(text) > 0:
        message = base64.urlsafe_b64decode(text)
        message = str(message, 'utf-8')
        message = email.message_from_string(message)
    return message


def readMessage(content) -> str:
    message = None
    if "data" in content['payload']['body']:
        message = content['payload']['body']['data']
        message = data_encoder(message)
    elif "data" in content['payload']['parts'][0]['body']:
        message = content['payload']['parts'][0]['body']['data']
        message = data_encoder(message)
    else:
        print("body has no data.")
    return message


def get_inbox_gmails():
    # Call the Gmail API
    results = service.users().messages().list(
        userId='me', labelIds=["INBOX"], q="is:unread category:primary").execute()
    messages = results.get('messages', [])
    for message in messages:
        save_mail(message)


def send_gmail(recipient, subject, body):
    mail_from = service.users().getProfile(userId='me').execute()['emailAddress']
    mail_to = recipient
    mail_subject = subject
    mail_body = body

    mail = MIMEText(mail_body)
    mail['to'] = mail_to
    mail['from'] = mail_from
    mail['subject'] = mail_subject
    raw = base64.urlsafe_b64encode(mail.as_bytes())
    raw = raw.decode()
    body = {'raw': raw}

    try:
        mail = (service.users().messages().send(userId='me', body=body).execute())
        print("Your mail has been sent")
    except errors.Error:
        print("An error occurred. Mail not sent.")


def get_sent_gmails():
    results = service.users().messages().list(userId='me', labelIds=["SENT"]).execute()
    messages = results.get('messages', [])
    for message in messages[:5]:
        save_mail(message)


def save_mail(message):
    mail = service.users().messages().get(
        userId='me', id=message['id'], format="full").execute()
    headers = mail["payload"]["headers"]

    user = service.users().getProfile(userId='me').execute()['emailAddress']
    gmail_id = message['id']

    for i in headers:
        if i["name"] == "From" or i["name"] == "from":
            sender = i["value"]
            sender_email = re.search('<(.+)>', sender)
            if sender_email:
                sender_email = sender_email.group(1)
            else:
                sender_email = sender
        elif i["name"] == "To" or i["name"] == "to":
            recipients = i["value"]
            recipients_email = re.search('<(.+)>', recipients)
            if recipients_email:
                recipients_email = recipients_email.group(1)
            else:
                recipients_email = recipients
        elif i["name"] == "Subject" or i["name"] == "subject":
            subject = i["value"]
        elif i["name"] == "Date" or i["name"] == "date":
            date = i["value"]
            try:
                date = datetime.datetime.strptime(date, '%a, %d %b %Y %X %Z')
            except:
                try:
                    date = datetime.datetime.strptime(date, '%a, %d %b %Y %X %z')
                except:
                    try:
                        date = datetime.datetime.strptime(date, '%a, %d %b %Y %X %z (%Z)')
                    except:
                        date = date[:-6].strip()
                        date = datetime.datetime.strptime(date, '%a, %d %b %Y %X %z')

    body = readMessage(mail)

    mail2 = Email(
        user=user,
        gmail_id=gmail_id,
        sender=sender,
        sender_email=sender_email,
        recipients=recipients,
        recipients_email=recipients_email,
        subject=subject,
        body=body,
        timestamp=date
    )
    mail2.save()
31.872611
118
0.607314
0
0
3c5b5a01aea276ed55213cc1efedac91d26ae1c8
1,580
py
Python
diagrams/ibm/blockchain.py
houmam/diagrams
eaf3e98304014e847c347bfae19bbfb3fe91abb2
[ "MIT" ]
17,037
2020-02-03T01:30:30.000Z
2022-03-31T18:09:15.000Z
diagrams/ibm/blockchain.py
loftwah/diagrams
e45804b48d5360fe5bae1b785db6527db5a57d16
[ "MIT" ]
529
2020-02-03T10:43:41.000Z
2022-03-31T17:33:08.000Z
diagrams/ibm/blockchain.py
loftwah/diagrams
e45804b48d5360fe5bae1b785db6527db5a57d16
[ "MIT" ]
1,068
2020-02-05T11:54:29.000Z
2022-03-30T23:28:55.000Z
# This module is automatically generated by autogen.sh. DO NOT EDIT.

from . import _IBM


class _Blockchain(_IBM):
    _type = "blockchain"
    _icon_dir = "resources/ibm/blockchain"


class BlockchainDeveloper(_Blockchain):
    _icon = "blockchain-developer.png"


class Blockchain(_Blockchain):
    _icon = "blockchain.png"


class CertificateAuthority(_Blockchain):
    _icon = "certificate-authority.png"


class ClientApplication(_Blockchain):
    _icon = "client-application.png"


class Communication(_Blockchain):
    _icon = "communication.png"


class Consensus(_Blockchain):
    _icon = "consensus.png"


class EventListener(_Blockchain):
    _icon = "event-listener.png"


class Event(_Blockchain):
    _icon = "event.png"


class ExistingEnterpriseSystems(_Blockchain):
    _icon = "existing-enterprise-systems.png"


class HyperledgerFabric(_Blockchain):
    _icon = "hyperledger-fabric.png"


class KeyManagement(_Blockchain):
    _icon = "key-management.png"


class Ledger(_Blockchain):
    _icon = "ledger.png"


class MembershipServicesProviderApi(_Blockchain):
    _icon = "membership-services-provider-api.png"


class Membership(_Blockchain):
    _icon = "membership.png"


class MessageBus(_Blockchain):
    _icon = "message-bus.png"


class Node(_Blockchain):
    _icon = "node.png"


class Services(_Blockchain):
    _icon = "services.png"


class SmartContract(_Blockchain):
    _icon = "smart-contract.png"


class TransactionManager(_Blockchain):
    _icon = "transaction-manager.png"


class Wallet(_Blockchain):
    _icon = "wallet.png"


# Aliases
17.173913
68
0.728481
0
0
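Since the module above only defines node classes, a minimal hypothetical diagram shows how they are consumed; Diagram and the >> edge operator are the diagrams package's standard API, while the node labels are made up:

from diagrams import Diagram
from diagrams.ibm.blockchain import ClientApplication, Ledger, SmartContract

with Diagram("blockchain_sketch", show=False):  # writes blockchain_sketch.png
    ClientApplication("client") >> SmartContract("contract") >> Ledger("ledger")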
f353ecb9975a775e9ee105aa31a7468ed23f0c58
1,634
py
Python
sp/test_remap.py
crepuscularlight/SemesterProject
acfb219ca315d912b76bb581b932aaf48090fa94
[ "MIT" ]
null
null
null
sp/test_remap.py
crepuscularlight/SemesterProject
acfb219ca315d912b76bb581b932aaf48090fa94
[ "MIT" ]
null
null
null
sp/test_remap.py
crepuscularlight/SemesterProject
acfb219ca315d912b76bb581b932aaf48090fa94
[ "MIT" ]
null
null
null
_base_ = '../swin/mask_rcnn_swin-t-p4-w7_fpn_1x_coco.py'

dataset_type = 'CocoDataset'
prefix = '../coco-annotator/datasets/test/'

classes = ('plasticbottle', 'alu can', 'box')
# classes=('',)

model = dict(
    roi_head=dict(
        bbox_head=dict(num_classes=3),
        mask_head=dict(num_classes=3)))

# train_pipeline = [
#     dict(type='LoadImageFromFile'),
#     dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
#     dict(type='Resize', img_scale=(128,128), keep_ratio=True),
#     dict(type='RandomFlip', flip_ratio=0.5),
#     dict(type='Normalize', **img_norm_cfg),
#     dict(type='Pad', size_divisor=32),
#     dict(type='DefaultFormatBundle'),
#     dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
#
# ]
# train1=dict(
#     type=dataset_type,
#     classes=classes,
#     ann_file=['data/own/test-1.json'],
#     img_prefix=prefix,
#     pipeline=train_pipeline
# )
# train2=dict(
#     type=dataset_type,
#     classes=classes,
#     ann_file=['data/own/ann_map_to_1.json'],
#     img_prefix=prefix,
#     pipeline=train_pipeline
# )

data = dict(
    train=dict(
        type=dataset_type,
        classes=classes,
        ann_file=['data/own/test-1.json', 'data/own/ann_map_to_1.json'],
        img_prefix=prefix
    ),
    # train=[train1,train2],
    val=dict(
        type=dataset_type,
        classes=classes,
        ann_file='data/own/ann_map_to_1.json',
        img_prefix=prefix
    ),
    test=dict(
        type=dataset_type,
        classes=classes,
        ann_file='data/own/ann_map_to_1.json',
        img_prefix=prefix
    )
)
29.178571
79
0.612607
0
0
cd903e7ade80030c34ceee4d669a0b45dddb9daa
5,234
py
Python
flickipedia/mysqlio.py
rfaulkner/Flickipedia
1b53f30be4027901748a09c411d568c7148f4e4b
[ "BSD-2-Clause" ]
1
2016-03-11T09:40:19.000Z
2016-03-11T09:40:19.000Z
flickipedia/mysqlio.py
rfaulkner/Flickipedia
1b53f30be4027901748a09c411d568c7148f4e4b
[ "BSD-2-Clause" ]
1
2015-02-27T02:23:19.000Z
2015-02-27T02:23:19.000Z
flickipedia/mysqlio.py
rfaulkner/Flickipedia
1b53f30be4027901748a09c411d568c7148f4e4b
[ "BSD-2-Clause" ]
null
null
null
""" Handle MySQL I/O via sqlalchemy engine and ORM """ from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from flickipedia.config import schema from flickipedia.config import log, settings class DataIOMySQL(object): """ Class implementing data IO for MySQL. Utilizes sqlalchemy [1]. Database and table schemas will be stored in schema. Modifications to this schema will be persisted with sync [1] http://docs.sqlalchemy.org """ DEFAULTS = { 'dialect': 'mysql', 'driver': '', 'host': 'localhost', 'port': 3306, 'db': settings.__mysql_db__, 'user': settings.__mysql_user__, 'pwrd': settings.__mysql_pass__, } def __init__(self, **kwargs): super(DataIOMySQL, self).__init__() self.engine = None self.sess = None for key in self.DEFAULTS.keys(): if kwargs.has_key(key): setattr(self, key, kwargs[key]) else: setattr(self, key, self.DEFAULTS[key]) def connect(self, log=False): """ dialect+driver://username:password@host:port/database """ if self.driver: connect_str = '{0}+{1}://{2}:{3}@{4}/{5}'.format( self.dialect, self.driver, self.user, self.pwrd, self.host, self.db, ) else: connect_str = '{0}://{1}:{2}@{3}/{4}'.format( self.dialect, self.user, self.pwrd, self.host, self.db, ) if log: log.info('Establishing connection to "%s://%s@%s/%s"' % ( self.dialect, self.user, self.host, self.db )) self.engine = create_engine(connect_str) self.make_session() def connect_lite(self): """ Use an in-memory db """ self.engine = create_engine('sqlite://') self.make_session() def make_session(self): """ Create a session """ Session = sessionmaker() Session.configure(bind=self.engine) self.sess = Session() @property def session(self): return self.sess def create_table(self, obj_name): """ Method for table creation :param name: schema object name :return: boolean indicating status """ if hasattr(schema, obj_name): getattr(schema, obj_name).__table__.create(bind=self.engine) return True else: log.error('Schema object not found for "%s"' % obj_name) return False def drop_table(self, obj_name): """ Method to drop creation :param name: schema object name :return: boolean indicating status """ if hasattr(schema, obj_name): getattr(schema, obj_name).__table__.drop(bind=self.engine) return True else: return False def fetch_all_rows(self, obj_name): """ Method to extract all rows from database. 
:param name: object to persist :return: row list from table """ obj = getattr(schema, obj_name) return self.session.query(obj, obj.name).all() def fetch_row(self, tbl, col, value): """ Fetch a row by id :param tbl: str, table name :param col: str, column name :param value: *, value on whih to filter """ schema_obj = getattr(schema, tbl) try: return self.session.query(schema_obj).filter( getattr(schema_obj, col) == value) except Exception as e: log.error('Couldn\'t filter row: "%s"' % e.message) return [] def insert(self, obj_name, **kwargs): """ Method to insert rows in database :param name: object to persist :param **kwargs: field values :return: boolean indicating status of action """ if not self.session: log.error('No session') return False try: log.info('Attempting to insert row in schema "%s": "%s"' % ( obj_name, str([key + ':' + str(kwargs[key])[:100] for key in kwargs]))) self.session.add(getattr(schema, obj_name)(**kwargs)) self.session.commit() return True except Exception as e: log.error('Failed to insert row: "%s"' % e.message) return False def delete(self, qry_obj): """ Method to delete rows from database :param qry_obj: object to delete :return: boolean indicating status of action """ if not self.session: log.error('No session') return False try: self.session.delete(qry_obj) self.session.commit() return True except Exception as e: log.error('Failed to delete row "%s": "%s"' % (str(qry_obj), e.message())) return False
28.601093
88
0.526557
""" Handle MySQL I/O via sqlalchemy engine and ORM """ from sqlalchemy import create_engine from sqlalchemy.orm import sessionmaker from flickipedia.config import schema from flickipedia.config import log, settings class DataIOMySQL(object): """ Class implementing data IO for MySQL. Utilizes sqlalchemy [1]. Database and table schemas will be stored in schema. Modifications to this schema will be persisted with sync [1] http://docs.sqlalchemy.org """ DEFAULTS = { 'dialect': 'mysql', 'driver': '', 'host': 'localhost', 'port': 3306, 'db': settings.__mysql_db__, 'user': settings.__mysql_user__, 'pwrd': settings.__mysql_pass__, } def __init__(self, **kwargs): super(DataIOMySQL, self).__init__() self.engine = None self.sess = None for key in self.DEFAULTS.keys(): if kwargs.has_key(key): setattr(self, key, kwargs[key]) else: setattr(self, key, self.DEFAULTS[key]) def connect(self, log=False): """ dialect+driver://username:password@host:port/database """ if self.driver: connect_str = '{0}+{1}://{2}:{3}@{4}/{5}'.format( self.dialect, self.driver, self.user, self.pwrd, self.host, self.db, ) else: connect_str = '{0}://{1}:{2}@{3}/{4}'.format( self.dialect, self.user, self.pwrd, self.host, self.db, ) if log: log.info('Establishing connection to "%s://%s@%s/%s"' % ( self.dialect, self.user, self.host, self.db )) self.engine = create_engine(connect_str) self.make_session() def connect_lite(self): """ Use an in-memory db """ self.engine = create_engine('sqlite://') self.make_session() def make_session(self): """ Create a session """ Session = sessionmaker() Session.configure(bind=self.engine) self.sess = Session() @property def session(self): return self.sess def create_table(self, obj_name): """ Method for table creation :param name: schema object name :return: boolean indicating status """ if hasattr(schema, obj_name): getattr(schema, obj_name).__table__.create(bind=self.engine) return True else: log.error('Schema object not found for "%s"' % obj_name) return False def drop_table(self, obj_name): """ Method to drop creation :param name: schema object name :return: boolean indicating status """ if hasattr(schema, obj_name): getattr(schema, obj_name).__table__.drop(bind=self.engine) return True else: return False def fetch_all_rows(self, obj_name): """ Method to extract all rows from database. 
:param name: object to persist :return: row list from table """ obj = getattr(schema, obj_name) return self.session.query(obj, obj.name).all() def fetch_row(self, tbl, col, value): """ Fetch a row by id :param tbl: str, table name :param col: str, column name :param value: *, value on whih to filter """ schema_obj = getattr(schema, tbl) try: return self.session.query(schema_obj).filter( getattr(schema_obj, col) == value) except Exception as e: log.error('Couldn\'t filter row: "%s"' % e.message) return [] def insert(self, obj_name, **kwargs): """ Method to insert rows in database :param name: object to persist :param **kwargs: field values :return: boolean indicating status of action """ if not self.session: log.error('No session') return False try: log.info('Attempting to insert row in schema "%s": "%s"' % ( obj_name, str([key + ':' + str(kwargs[key])[:100] for key in kwargs]))) self.session.add(getattr(schema, obj_name)(**kwargs)) self.session.commit() return True except Exception as e: log.error('Failed to insert row: "%s"' % e.message) return False def delete(self, qry_obj): """ Method to delete rows from database :param qry_obj: object to delete :return: boolean indicating status of action """ if not self.session: log.error('No session') return False try: self.session.delete(qry_obj) self.session.commit() return True except Exception as e: log.error('Failed to delete row "%s": "%s"' % (str(qry_obj), e.message())) return False
0
0
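
A minimal usage sketch for the DataIOMySQL class above. The import path, the 'Photo' schema class and its 'name' field are assumptions (the schema module is only referenced, not shown); connect_lite() keeps the sketch self-contained by running against an in-memory SQLite engine instead of MySQL:

from flickipedia.mysqlio import DataIOMySQL  # hypothetical module path

io = DataIOMySQL()
io.connect_lite()                  # in-memory SQLite; no MySQL server needed
io.create_table('Photo')           # 'Photo' is an assumed schema class name
io.insert('Photo', name='sunset')  # field names are assumptions as well
for row in io.fetch_all_rows('Photo'):
    print(row)
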
4af99f0e08de844feaa37c0def95f861de377265
1,158
py
Python
pepdb/tasks/migrations/0025_auto_20171022_0208.py
dchaplinsky/pep.org.ua
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
[ "MIT" ]
7
2015-12-21T03:52:46.000Z
2020-07-24T19:17:23.000Z
pepdb/tasks/migrations/0025_auto_20171022_0208.py
dchaplinsky/pep.org.ua
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
[ "MIT" ]
12
2016-03-05T18:11:05.000Z
2021-06-17T20:20:03.000Z
pepdb/tasks/migrations/0025_auto_20171022_0208.py
dchaplinsky/pep.org.ua
8633a65fb657d7f04dbdb12eb8ae705fa6be67e3
[ "MIT" ]
4
2016-07-17T20:19:38.000Z
2021-03-23T12:47:20.000Z
# -*- coding: utf-8 -*- # Generated by Django 1.11.5 on 2017-10-21 23:08 from __future__ import unicode_literals from django.db import migrations def count_connections(p): return p.person2company_set.count() + p.from_persons.count() + p.person2country_set.count() + p.to_persons.count() def delete_stuck_orphans(apps, schema_editor): Person = apps.get_model("core", "Person") PersonDeduplication = apps.get_model("tasks", "PersonDeduplication") for pd in PersonDeduplication.objects.filter(status="m"): try: p1 = Person.objects.get(pk=pd.person1_id) p2 = Person.objects.get(pk=pd.person2_id) if not count_connections(p1): p1.delete() if not count_connections(p2): p2.delete() if count_connections(p1) and count_connections(p2): pd.applied = False pd.save() except Person.DoesNotExist: pass class Migration(migrations.Migration): dependencies = [ ('tasks', '0024_auto_20171020_0121'), ] operations = [ migrations.RunPython(delete_stuck_orphans) ]
27.571429
118
0.632988
# -*- coding: utf-8 -*- # Generated by Django 1.11.5 on 2017-10-21 23:08 from __future__ import unicode_literals from django.db import migrations def count_connections(p): return p.person2company_set.count() + p.from_persons.count() + p.person2country_set.count() + p.to_persons.count() def delete_stuck_orphans(apps, schema_editor): Person = apps.get_model("core", "Person") PersonDeduplication = apps.get_model("tasks", "PersonDeduplication") for pd in PersonDeduplication.objects.filter(status="m"): try: p1 = Person.objects.get(pk=pd.person1_id) p2 = Person.objects.get(pk=pd.person2_id) if not count_connections(p1): p1.delete() if not count_connections(p2): p2.delete() if count_connections(p1) and count_connections(p2): pd.applied = False pd.save() except Person.DoesNotExist: pass class Migration(migrations.Migration): dependencies = [ ('tasks', '0024_auto_20171020_0121'), ] operations = [ migrations.RunPython(delete_stuck_orphans) ]
0
0
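
The migration above registers delete_stuck_orphans as a forward-only RunPython, so it cannot be unapplied. A small hypothetical sketch of the same pattern with an explicit no-op reverse (migrations.RunPython.noop, available since Django 1.8) keeps the data pass one-way while letting the migration graph roll back cleanly:

from django.db import migrations


def forwards(apps, schema_editor):
    # Use the historical model, exactly as the migration above does.
    Person = apps.get_model("core", "Person")
    # ... data pass over Person ...


class Migration(migrations.Migration):

    dependencies = [
        ('tasks', '0024_auto_20171020_0121'),
    ]

    operations = [
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]
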
ce94a6e9468ca1ec3c62b98a42a762f04ebc1840
182
py
Python
tests/test_patient.py
genghisken/python-intermediate-inflammation
dc16cfb5824a713e8881dba1116f607793dd5f4c
[ "MIT" ]
null
null
null
tests/test_patient.py
genghisken/python-intermediate-inflammation
dc16cfb5824a713e8881dba1116f607793dd5f4c
[ "MIT" ]
20
2021-12-10T10:36:32.000Z
2021-12-10T12:46:34.000Z
code/poetry_project/tests/test_patient.py
SABS-R3/software-engineering-day4
d73cc72786fceb236cd1ec33e900e482fbad08d4
[ "CC-BY-4.0" ]
1
2021-12-10T11:54:57.000Z
2021-12-10T11:54:57.000Z
"""Tests for the Patient model.""" def test_create_patient(): from inflammation.models import Patient name = 'Alice' p = Patient(name=name) assert p.name == name
16.545455
43
0.659341
"""Tests for the Patient model.""" def test_create_patient(): from inflammation.models import Patient name = 'Alice' p = Patient(name=name) assert p.name == name
0
0
270ceac1e816bd1486bca6f8ee6231afb3d168fa
6,299
py
Python
PT2022_0412_1716_simpler_keys.py
O8pen/PhraseTranslate
62e657d1e58ab36df27f181f51410840526e939f
[ "Apache-2.0" ]
null
null
null
PT2022_0412_1716_simpler_keys.py
O8pen/PhraseTranslate
62e657d1e58ab36df27f181f51410840526e939f
[ "Apache-2.0" ]
null
null
null
PT2022_0412_1716_simpler_keys.py
O8pen/PhraseTranslate
62e657d1e58ab36df27f181f51410840526e939f
[ "Apache-2.0" ]
null
null
null
# Python 3.7.9 # pip install clipboard # pip install pywin32 # pip install pyautogui # pip install pynput # Google chrome Keyboard Shortcuts for Google Translate https://chrome.google.com/webstore/detail/keyboard-shortcuts-for-go/akjhnbnjanndggbcegmdggfjjclohjpo # alt+j listen google translate # Google chrome Dark Reader https://chrome.google.com/webstore/detail/dark-reader/eimadpbcbfnmbkopoojfekhnkhdbieeh # Microsoft edge 110% zoom - https://www.phrasereader.com/ # Google chrome 125% zoom - https://translate.google.com/ from clipboard import copy, paste from win32api import SetCursorPos, mouse_event from win32con import MOUSEEVENTF_LEFTDOWN, MOUSEEVENTF_LEFTUP from time import sleep from pyautogui import hotkey from pynput.keyboard import Listener, Key next_x = 612 next_y = 562 prev_x = 359 prev_y = 562 translate_text_x = 1356 translate_text_y = 352 translate_blank_x = 1392 translate_blank_y = 222 text = "" x = [] hasbeencaptured = False last_key = 0 was_pressed_next = False was_pressed_prev = False was_pressed_one = False was_pressed_two = False was_pressed_three = False was_pressed_four = False was_pressed_allwords = False def on_press(key): global last_key global was_pressed_next global was_pressed_prev global was_pressed_one global was_pressed_two global was_pressed_three global was_pressed_four global was_pressed_allwords # hasattr(key, 'vk') # print("Key pressed: {0}".format(key)) # print(key.vk) if hasattr(key, 'vk') and key.vk == 101: # Numpad 5 (Next Button) if was_pressed_next == False: was_pressed_next = True last_key = 101 nextbutton() elif hasattr(key, 'vk') and key.vk == 100: # Numpad 4 (Prev button) if was_pressed_prev == False: was_pressed_prev = True last_key = 100 prevbutton() elif hasattr(key, 'vk') and key.vk == 96: # Numpad 0 (Listen all words) if was_pressed_allwords == False: was_pressed_allwords = True if last_key == 96: hotkey('alt', 'j') else: last_key = 96 capture_faster() copy(text) playsound() elif hasattr(key, 'vk') and key.vk == 97: # Numpad 1 (Listen Word[1]) if was_pressed_one == False: was_pressed_one = True if last_key == 97: hotkey('alt', 'j') else: last_key = 97 capture_faster() if(len(x) >= 1): copy(x[0]) playsound() elif hasattr(key, 'vk') and key.vk == 98: # Numpad 2 (Listen Word[2]) if was_pressed_two == False: was_pressed_two = True if last_key == 98: hotkey('alt', 'j') else: last_key = 98 capture_faster() if(len(x) >= 2): copy(x[1]) playsound() elif hasattr(key, 'vk') and key.vk == 99: # Numpad 3 (Listen Word[3]) if was_pressed_three == False: was_pressed_three = True if last_key == 99: hotkey('alt', 'j') else: last_key = 99 capture_faster() if(len(x) >= 3): copy(x[2]) playsound() elif hasattr(key, 'vk') and key.vk == 102: # Numpad 6 (Listen Word[4]) if was_pressed_four == False: was_pressed_four = True if last_key == 102: hotkey('alt', 'j') else: last_key = 102 capture_faster() if(len(x) >= 4): copy(x[3]) playsound() def on_release(key): global was_pressed_next global was_pressed_prev global was_pressed_allwords global was_pressed_one global was_pressed_two global was_pressed_three global was_pressed_four if hasattr(key, 'vk') and key.vk == 101: # Numpad 5 (Next Button) was_pressed_next = False elif hasattr(key, 'vk') and key.vk == 100: # Numpad 4 (Prev button) was_pressed_prev = False elif hasattr(key, 'vk') and key.vk == 96: # Numpad 0 (Listen all words) was_pressed_allwords = False elif hasattr(key, 'vk') and key.vk == 97: # Numpad 1 (Listen Word[1]) was_pressed_one = False elif hasattr(key, 'vk') and key.vk == 98: # Numpad 2 (Listen Word[2]) 
        was_pressed_two = False
    elif hasattr(key, 'vk') and key.vk == 99:  # Numpad 3 (Listen Word[3])
        was_pressed_three = False
    elif hasattr(key, 'vk') and key.vk == 102:  # Numpad 6 (Listen Word[4])
        was_pressed_four = False


def click(x, y):
    SetCursorPos((x, y))
    mouse_event(MOUSEEVENTF_LEFTDOWN, 0, 0)
    sleep(0.05)
    mouse_event(MOUSEEVENTF_LEFTUP, 0, 0)


def nextbutton():
    global hasbeencaptured
    global next_x
    global next_y
    click(next_x, next_y)
    hotkey('ctrl', 'a')
    sleep(0.05)
    hotkey('ctrl', 'c')
    hasbeencaptured = False


def prevbutton():
    global hasbeencaptured
    global prev_x
    global prev_y
    click(prev_x, prev_y)
    hotkey('ctrl', 'a')
    sleep(0.05)
    hotkey('ctrl', 'c')
    hasbeencaptured = False


def playsound():
    global translate_text_x
    global translate_text_y
    global translate_blank_x
    global translate_blank_y
    click(translate_text_x, translate_text_y)
    hotkey('ctrl', 'a')
    sleep(0.1)
    hotkey('ctrl', 'v')
    sleep(0.05)
    hotkey('alt', 'j')
    sleep(0.55)
    click(translate_blank_x, translate_blank_y)


def capture_faster():
    global text
    global x
    global hasbeencaptured
    if hasbeencaptured == False:
        text = paste()
        text = text[2:]
        endNumber = text.find('\n')-1
        text = text[0:endNumber]
        punctuations = '''!()[]{};:'"\,<>—./?@#$%^&*‘_~\n'''
        no_punct = ""
        for char in text:
            if char not in punctuations:
                no_punct = no_punct + char
        text = no_punct.lower()
        x = text.split(' ')
        hasbeencaptured = True


with Listener(on_press=on_press, on_release=on_release) as listener:
    listener.join()
27.627193
156
0.589459
# Python 3.7.9 # pip install clipboard # pip install pywin32 # pip install pyautogui # pip install pynput # Google chrome Keyboard Shortcuts for Google Translate https://chrome.google.com/webstore/detail/keyboard-shortcuts-for-go/akjhnbnjanndggbcegmdggfjjclohjpo # alt+j listen google translate # Google chrome Dark Reader https://chrome.google.com/webstore/detail/dark-reader/eimadpbcbfnmbkopoojfekhnkhdbieeh # Microsoft edge 110% zoom - https://www.phrasereader.com/ # Google chrome 125% zoom - https://translate.google.com/ from clipboard import copy, paste from win32api import SetCursorPos, mouse_event from win32con import MOUSEEVENTF_LEFTDOWN, MOUSEEVENTF_LEFTUP from time import sleep from pyautogui import hotkey from pynput.keyboard import Listener, Key next_x = 612 next_y = 562 prev_x = 359 prev_y = 562 translate_text_x = 1356 translate_text_y = 352 translate_blank_x = 1392 translate_blank_y = 222 text = "" x = [] hasbeencaptured = False last_key = 0 was_pressed_next = False was_pressed_prev = False was_pressed_one = False was_pressed_two = False was_pressed_three = False was_pressed_four = False was_pressed_allwords = False def on_press(key): global last_key global was_pressed_next global was_pressed_prev global was_pressed_one global was_pressed_two global was_pressed_three global was_pressed_four global was_pressed_allwords # hasattr(key, 'vk') # print("Key pressed: {0}".format(key)) # print(key.vk) if hasattr(key, 'vk') and key.vk == 101: # Numpad 5 (Next Button) if was_pressed_next == False: was_pressed_next = True last_key = 101 nextbutton() elif hasattr(key, 'vk') and key.vk == 100: # Numpad 4 (Prev button) if was_pressed_prev == False: was_pressed_prev = True last_key = 100 prevbutton() elif hasattr(key, 'vk') and key.vk == 96: # Numpad 0 (Listen all words) if was_pressed_allwords == False: was_pressed_allwords = True if last_key == 96: hotkey('alt', 'j') else: last_key = 96 capture_faster() copy(text) playsound() elif hasattr(key, 'vk') and key.vk == 97: # Numpad 1 (Listen Word[1]) if was_pressed_one == False: was_pressed_one = True if last_key == 97: hotkey('alt', 'j') else: last_key = 97 capture_faster() if(len(x) >= 1): copy(x[0]) playsound() elif hasattr(key, 'vk') and key.vk == 98: # Numpad 2 (Listen Word[2]) if was_pressed_two == False: was_pressed_two = True if last_key == 98: hotkey('alt', 'j') else: last_key = 98 capture_faster() if(len(x) >= 2): copy(x[1]) playsound() elif hasattr(key, 'vk') and key.vk == 99: # Numpad 3 (Listen Word[3]) if was_pressed_three == False: was_pressed_three = True if last_key == 99: hotkey('alt', 'j') else: last_key = 99 capture_faster() if(len(x) >= 3): copy(x[2]) playsound() elif hasattr(key, 'vk') and key.vk == 102: # Numpad 6 (Listen Word[4]) if was_pressed_four == False: was_pressed_four = True if last_key == 102: hotkey('alt', 'j') else: last_key = 102 capture_faster() if(len(x) >= 4): copy(x[3]) playsound() def on_release(key): global was_pressed_next global was_pressed_prev global was_pressed_allwords global was_pressed_one global was_pressed_two global was_pressed_three global was_pressed_four if hasattr(key, 'vk') and key.vk == 101: # Numpad 5 (Next Button) was_pressed_next = False elif hasattr(key, 'vk') and key.vk == 100: # Numpad 4 (Prev button) was_pressed_prev = False elif hasattr(key, 'vk') and key.vk == 96: # Numpad 0 (Listen all words) was_pressed_allwords = False elif hasattr(key, 'vk') and key.vk == 97: # Numpad 1 (Listen Word[1]) was_pressed_one = False elif hasattr(key, 'vk') and key.vk == 98: # Numpad 2 (Listen Word[2]) 
was_pressed_two = False elif hasattr(key, 'vk') and key.vk == 99: # Numpad 3 (Listen Word[3]) was_pressed_three = False elif hasattr(key, 'vk') and key.vk == 102: # Numpad 6 (Listen Word[4]) was_pressed_four = False def click(x, y): SetCursorPos((x, y)) mouse_event(MOUSEEVENTF_LEFTDOWN, 0, 0) sleep(0.05) mouse_event(MOUSEEVENTF_LEFTUP, 0, 0) def nextbutton(): global hasbeencaptured global next_x global next_y click(next_x, next_y) hotkey('ctrl', 'a') sleep(0.05) hotkey('ctrl', 'c') hasbeencaptured = False def prevbutton(): global hasbeencaptured global prev_x global prev_y click(prev_x, prev_y) hotkey('ctrl', 'a') sleep(0.05) hotkey('ctrl', 'c') hasbeencaptured = False def playsound(): global translate_text_x global translate_text_y global translate_blank_x global translate_blank_y click(translate_text_x, translate_text_y) hotkey('ctrl', 'a') sleep(0.1) hotkey('ctrl', 'v') sleep(0.05) hotkey('alt', 'j') sleep(0.55) click(translate_blank_x, translate_blank_y) def capture_faster(): global text global x global hasbeencaptured if hasbeencaptured == False: text = paste() text = text[2:] endNumber = text.find('\n')-1 text = text[0:endNumber] punctuations = '''!()[]{};:'"\,<>—./?@#$%^&*‘_~\n''' no_punct = "" for char in text: if char not in punctuations: no_punct = no_punct + char text = no_punct.lower() x = text.split(' ') hasbeencaptured = True with Listener(on_press=on_press, on_release=on_release) as listener: listener.join()
6
0
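
The on_press/on_release pair above dispatches on key.vk through long if/elif chains. A standalone sketch (not part of the original script) of the same pynput pattern with a handler table shows how the chain collapses; the vk codes are the numpad codes the script already uses:

from pynput.keyboard import Listener

# vk -> action; 100/101 are Numpad 4/5, matching the script above.
HANDLERS = {
    100: lambda: print("prev"),
    101: lambda: print("next"),
}


def on_press(key):
    vk = getattr(key, 'vk', None)
    if vk in HANDLERS:
        HANDLERS[vk]()


with Listener(on_press=on_press) as listener:
    listener.join()
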
77826738beb692c3294e0414b44a66d2c7706884
1,019
py
Python
Python/SwapNodesInPairs.py
TonnyL/Windary
39f85cdedaaf5b85f7ce842ecef975301fc974cf
[ "MIT" ]
205
2017-11-16T08:38:46.000Z
2022-03-06T05:50:03.000Z
Python/SwapNodesInPairs.py
santosh241/Windary
39f85cdedaaf5b85f7ce842ecef975301fc974cf
[ "MIT" ]
3
2018-04-10T10:17:52.000Z
2020-12-11T08:00:09.000Z
Python/SwapNodesInPairs.py
santosh241/Windary
39f85cdedaaf5b85f7ce842ecef975301fc974cf
[ "MIT" ]
28
2018-04-10T06:42:42.000Z
2021-09-14T14:15:39.000Z
# -*- coding: UTF-8 -*-
#
# Given a linked list, swap every two adjacent nodes and return its head.
#
# For example,
# Given 1->2->3->4, you should return the list as 2->1->4->3.
#
# Your algorithm should use only constant space. You may not modify the
# values in the list; only the nodes themselves may be changed.
#
# Python, Python3 all accepted.


class SwapNodesInPairs:
    def swapPairs(self, head):
        """
        :type head: ListNode
        :rtype: ListNode
        """
        if head is None or head.next is None:
            return head

        # Rewire node pointers instead of swapping values, since the problem
        # statement forbids modifying the values in the list.
        dummy = ListNode(0)
        dummy.next = head
        prev = dummy
        while prev.next is not None and prev.next.next is not None:
            first = prev.next
            second = first.next
            first.next = second.next
            second.next = first
            prev.next = second
            prev = first
        return dummy.next


class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None

    def __eq__(self, other):
        return self.val == other.val and self.next == other.next
24.261905
125
0.56526
# -*- coding: UTF-8 -*- # # Given a linked list, swap every two adjacent nodes and return its head. # # For example, # Given 1->2->3->4, you should return the list as 2->1->4->3. # # Your algorithm should use only constant space. You may not modify the values in the list, only nodes itself can be changed. # # Python, Python3 all accepted. class SwapNodesInPairs: def swapPairs(self, head): """ :type head: ListNode :rtype: ListNode """ if head is None or head.next is None: return head pre = head nxt = pre.next while pre is not None and nxt is not None: tmp = nxt.val nxt.val = pre.val pre.val = tmp pre = nxt.next if pre is not None: nxt = pre.next return head class ListNode: def __init__(self, x): self.val = x self.next = None def __eq__(self, other): return self.val == other.val and self.next == other.next
0
0
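
A quick standalone check of the pairwise swap on the 1->2->3->4 example from the problem statement; it re-declares a minimal node type rather than importing the solution above, and rewires pointers so no values are touched:

class Node:
    def __init__(self, val, nxt=None):
        self.val, self.next = val, nxt


def swap_pairs(head):
    dummy = Node(0, head)
    prev = dummy
    while prev.next and prev.next.next:
        first, second = prev.next, prev.next.next
        # RHS is evaluated first, so the three pointer writes are safe.
        first.next, second.next, prev.next = second.next, first, second
        prev = first
    return dummy.next


head = Node(1, Node(2, Node(3, Node(4))))
out = swap_pairs(head)
vals = []
while out:
    vals.append(out.val)
    out = out.next
assert vals == [2, 1, 4, 3]
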
07b3c976ac05f24a31d7345afae0afd0ab42d98a
1,429
py
Python
tests/test_examples/test_brml/test_chapter_03.py
vahndi/probability
6ddf88e6f3d947c96b879e426030f60eb5cb2d59
[ "MIT" ]
2
2020-02-21T00:47:03.000Z
2020-09-22T19:00:48.000Z
tests/test_examples/test_brml/test_chapter_03.py
vahndi/probability
6ddf88e6f3d947c96b879e426030f60eb5cb2d59
[ "MIT" ]
52
2020-01-16T16:05:08.000Z
2022-02-24T15:10:10.000Z
tests/test_examples/test_brml/test_chapter_03.py
vahndi/probability
6ddf88e6f3d947c96b879e426030f60eb5cb2d59
[ "MIT" ]
null
null
null
from unittest.case import TestCase from probability.discrete import Discrete, Conditional class TestChapter03(TestCase): def setUp(self) -> None: self.r = Discrete.binary(0.2, 'rain') self.s = Discrete.binary(0.1, 'sprinkler') self.j__r = Conditional.from_probs({ (1, 1): 1, (1, 0): 0.2, (0, 1): 0, (0, 0): 0.8 }, joint_variables='jack', conditional_variables='rain' ) self.t__r_s = Conditional.from_probs({ (1, 1, 0): 1, (1, 1, 1): 1, (1, 0, 1): 0.9, (1, 0, 0): 0, (0, 1, 0): 0, (0, 1, 1): 0, (0, 0, 1): 0.1, (0, 0, 0): 1 }, joint_variables='tracey', conditional_variables=['rain', 'sprinkler'] ) def test__3_1_11(self): r_s = self.r * self.s r_s_t = self.t__r_s * r_s s__t = r_s_t.given(tracey=1).p(sprinkler=1) self.assertAlmostEqual(0.3382, s__t, 4) def test__3_1_15(self): r_s = self.r * self.s j_t__r_s = self.j__r * self.t__r_s j_r_s_t = j_t__r_s * r_s j_s_t = j_r_s_t.marginal('jack', 'sprinkler', 'tracey') s__t1_j1 = j_s_t.given(tracey=1, jack=1).p(sprinkler=1) self.assertAlmostEqual(0.1604, s__t1_j1, 4)
28.58
63
0.480056
from unittest.case import TestCase from probability.discrete import Discrete, Conditional class TestChapter03(TestCase): def setUp(self) -> None: self.r = Discrete.binary(0.2, 'rain') self.s = Discrete.binary(0.1, 'sprinkler') self.j__r = Conditional.from_probs({ (1, 1): 1, (1, 0): 0.2, (0, 1): 0, (0, 0): 0.8 }, joint_variables='jack', conditional_variables='rain' ) self.t__r_s = Conditional.from_probs({ (1, 1, 0): 1, (1, 1, 1): 1, (1, 0, 1): 0.9, (1, 0, 0): 0, (0, 1, 0): 0, (0, 1, 1): 0, (0, 0, 1): 0.1, (0, 0, 0): 1 }, joint_variables='tracey', conditional_variables=['rain', 'sprinkler'] ) def test__3_1_11(self): r_s = self.r * self.s r_s_t = self.t__r_s * r_s s__t = r_s_t.given(tracey=1).p(sprinkler=1) self.assertAlmostEqual(0.3382, s__t, 4) def test__3_1_15(self): r_s = self.r * self.s j_t__r_s = self.j__r * self.t__r_s j_r_s_t = j_t__r_s * r_s j_s_t = j_r_s_t.marginal('jack', 'sprinkler', 'tracey') s__t1_j1 = j_s_t.given(tracey=1, jack=1).p(sprinkler=1) self.assertAlmostEqual(0.1604, s__t1_j1, 4)
0
0
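
The 0.3382 asserted in test__3_1_11 can be cross-checked by brute-force enumeration of the rain/sprinkler model, without the probability package; the tables below restate the priors and p(tracey=1 | rain, sprinkler) from setUp. The same enumeration, extended with the jack table, reproduces the 0.1604 in test__3_1_15:

p_r = {1: 0.2, 0: 0.8}                                       # p(rain)
p_s = {1: 0.1, 0: 0.9}                                       # p(sprinkler)
p_t1 = {(1, 1): 1.0, (1, 0): 1.0, (0, 1): 0.9, (0, 0): 0.0}  # p(t=1 | r, s)

# p(t=1, s) = sum_r p(t=1 | r, s) p(r) p(s)
joint = {s: sum(p_t1[(r, s)] * p_r[r] * p_s[s] for r in (0, 1))
         for s in (0, 1)}
posterior = joint[1] / (joint[0] + joint[1])                 # p(s=1 | t=1)
assert abs(posterior - 0.3382) < 1e-4
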
5bf1138dbd9cc41844dc6aff07cb5e59592dbe1a
7,238
py
Python
vaelib/avb.py
rnagumo/vaelib
9505a62e07f539df1a94f1ac7e9ada694df62844
[ "MIT" ]
1
2021-11-12T14:25:05.000Z
2021-11-12T14:25:05.000Z
vaelib/avb.py
rnagumo/vaelib
9505a62e07f539df1a94f1ac7e9ada694df62844
[ "MIT" ]
null
null
null
vaelib/avb.py
rnagumo/vaelib
9505a62e07f539df1a94f1ac7e9ada694df62844
[ "MIT" ]
1
2021-12-30T12:30:53.000Z
2021-12-30T12:30:53.000Z
"""Adversarial Variational Bayes (AVB). Adversarial Variational Bayes: Unifying Variational Autoencoders and Generative Adversarial Networks http://arxiv.org/abs/1701.04722 Ref) https://github.com/gdikov/adversarial-variational-bayes http://seiya-kumada.blogspot.com/2018/07/adversarial-variational-bayes.html https://github.com/LMescheder/AdversarialVariationalBayes https://nbviewer.jupyter.org/github/hayashiyus/Thermal-VAE/blob/master/adversarial%20variational%20bayes%20toy%20example-cyclical-annealing-MNIST-898-4000.ipynb """ from typing import Dict, Iterator, Optional, Tuple import torch from torch import Tensor, nn from .base import BaseVAE, nll_bernoulli class Encoder(nn.Module): """Encoder q(z|x, e). Args: in_channels (int): Channel size of inputs. z_dim (int): Dimension size of latents. e_dim (int): Dimension size of noises. """ def __init__(self, in_channels: int, z_dim: int, e_dim: int) -> None: super().__init__() self.conv = nn.Sequential( nn.Conv2d(in_channels, 32, 4, stride=2, padding=1), nn.ReLU(), nn.Conv2d(32, 32, 4, stride=2, padding=1), nn.ReLU(), nn.Conv2d(32, 64, 4, stride=2, padding=1), nn.ReLU(), nn.Conv2d(64, 64, 4, stride=2, padding=1), nn.ReLU(), ) self.fc_x = nn.Sequential( nn.Linear(1024, 256), nn.ReLU(), nn.Linear(256, z_dim), nn.ReLU(), ) self.fc_e = nn.Sequential( nn.Linear(e_dim, z_dim), nn.ReLU(), ) self.fc = nn.Linear(z_dim * 2, z_dim) def forward(self, x: Tensor, e: Tensor) -> Tensor: """Encodes z given x, e. Args: x (torch.Tensor): Observations, size `(b, c, h, w)`. e (torch.Tensor): Noises, size `(b, e)`. Returns: z (torch.Tensor): Encoded latents, size `(b, z)`. """ h_x = self.conv(x) h_x = h_x.view(-1, 1024) h_x = self.fc_x(h_x) h_e = self.fc_e(e) z = self.fc(torch.cat([h_x, h_e], dim=1)) return z class Decoder(nn.Module): """Decoder p(x|z). Args: in_channels (int): Channel size of inputs. z_dim (int): Dimension size of latents. """ def __init__(self, in_channels: int, z_dim: int) -> None: super().__init__() self.fc = nn.Sequential( nn.Linear(z_dim, 256), nn.ReLU(), nn.Linear(256, 1024), nn.ReLU(), ) self.deconv = nn.Sequential( nn.ConvTranspose2d(64, 64, 4, stride=2, padding=1), nn.ReLU(), nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1), nn.ReLU(), nn.ConvTranspose2d(32, 32, 4, stride=2, padding=1), nn.ReLU(), nn.ConvTranspose2d(32, in_channels, 4, stride=2, padding=1), nn.Sigmoid(), ) def forward(self, z: Tensor) -> Tensor: """Encodes z given x. Args: z (torch.Tensor): Latents, size `(b, z)`. Returns: probs (torch.Tensor): Decoded observations, size `(b, c, h, w)`. """ h = self.fc(z) h = h.view(-1, 64, 4, 4) probs = self.deconv(h) return probs class Discriminator(nn.Module): """Discriminator T(x, z). Args: in_channels (int): Channel size of inputs. z_dim (int): Dimension size of latents. """ def __init__(self, in_channels: int, z_dim: int) -> None: super().__init__() self.disc_x = nn.Sequential( nn.Conv2d(in_channels, 32, 4, stride=2, padding=1), nn.LeakyReLU(), nn.Conv2d(32, 32, 4, stride=2, padding=1), nn.LeakyReLU(), nn.Conv2d(32, 64, 4, stride=2, padding=1), nn.LeakyReLU(), nn.Conv2d(64, 64, 4, stride=2, padding=1), nn.LeakyReLU(), ) self.fc_x = nn.Linear(1024, 256) self.disc_z = nn.Sequential( nn.Linear(z_dim, 512), nn.LeakyReLU(), nn.Linear(512, 512), nn.LeakyReLU(), nn.Linear(512, 256), nn.LeakyReLU(), ) self.fc = nn.Linear(512, 1) def forward(self, x: Tensor, z: Tensor) -> Tensor: """Discriminate p(x)p(z) from p(x)q(z|x). Args: x (torch.Tensor): Observations, size `(b, c, h, w)`. z (torch.Tensor): Latents, size `(b, z)`. 
Returns: logits (torch.Tensor): Logits, size `(b, 1)`. """ h_x = self.disc_x(x) h_x = self.fc_x(h_x.view(-1, 1024)) h_z = self.disc_z(z) logits = self.fc(torch.cat([h_x, h_z], dim=1)) return logits class AVB(BaseVAE): """Adversarial Variational Bayes. Args: in_channels (int, optional): Channel size of inputs. z_dim (int, optional): Dimension size of latents. e_dim (int, optional): Dimension size of noises. """ def __init__(self, in_channels: int = 3, z_dim: int = 10, e_dim: int = 10) -> None: super().__init__() self.z_dim = z_dim self.e_dim = e_dim self.encoder = Encoder(in_channels, z_dim, e_dim) self.decoder = Decoder(in_channels, z_dim) self.discriminator = Discriminator(in_channels, z_dim) self.bce_loss = nn.BCEWithLogitsLoss(reduction="none") self.p_mu: Tensor self.p_var: Tensor self.register_buffer("p_mu", torch.zeros(1, z_dim)) self.register_buffer("p_var", torch.ones(1, z_dim)) def inference( self, x: Tensor, y: Optional[Tensor] = None, beta: float = 1.0 ) -> Tuple[Tuple[Tensor, ...], Dict[str, Tensor]]: batch = x.size(0) e_mu = x.new_zeros((batch, self.e_dim)) e_var = x.new_ones((batch, self.e_dim)) e = e_mu + e_var ** 0.5 * torch.randn_like(e_var) z_mu = x.new_zeros((batch, self.z_dim)) z_var = x.new_ones((batch, self.z_dim)) z_p = z_mu + z_var ** 0.5 * torch.randn_like(z_var) z_q = self.encoder(x, e) recon = self.decoder(z_q) logits = self.discriminator(x, z_q) logits = beta * logits.sum(dim=1) ce_loss = nll_bernoulli(x, recon, reduce=False) ce_loss = ce_loss.sum(dim=[1, 2, 3]) log_d_q = self.bce_loss(self.discriminator(x, z_q.detach()), z_q.new_ones((batch, 1))) log_d_p = self.bce_loss(self.discriminator(x, z_p), z_p.new_zeros((batch, 1))) loss_d = (log_d_q + log_d_p).sum(dim=1) loss_dict = { "loss": logits + ce_loss, "ce_loss": ce_loss, "logits": logits, "loss_d": loss_d, } return (recon, z_q), loss_dict def sample(self, batch_size: int = 1, y: Optional[Tensor] = None) -> Tensor: mu = self.p_mu.repeat(batch_size, 1) var = self.p_var.repeat(batch_size, 1) z = mu + var ** 0.5 * torch.randn_like(var) x = self.decoder(z) return x def adversarial_parameters(self) -> Optional[Iterator]: return self.discriminator.parameters()
28.496063
160
0.553606
"""Adversarial Variational Bayes (AVB). Adversarial Variational Bayes: Unifying Variational Autoencoders and Generative Adversarial Networks http://arxiv.org/abs/1701.04722 Ref) https://github.com/gdikov/adversarial-variational-bayes http://seiya-kumada.blogspot.com/2018/07/adversarial-variational-bayes.html https://github.com/LMescheder/AdversarialVariationalBayes https://nbviewer.jupyter.org/github/hayashiyus/Thermal-VAE/blob/master/adversarial%20variational%20bayes%20toy%20example-cyclical-annealing-MNIST-898-4000.ipynb """ from typing import Dict, Iterator, Optional, Tuple import torch from torch import Tensor, nn from .base import BaseVAE, nll_bernoulli class Encoder(nn.Module): """Encoder q(z|x, e). Args: in_channels (int): Channel size of inputs. z_dim (int): Dimension size of latents. e_dim (int): Dimension size of noises. """ def __init__(self, in_channels: int, z_dim: int, e_dim: int) -> None: super().__init__() self.conv = nn.Sequential( nn.Conv2d(in_channels, 32, 4, stride=2, padding=1), nn.ReLU(), nn.Conv2d(32, 32, 4, stride=2, padding=1), nn.ReLU(), nn.Conv2d(32, 64, 4, stride=2, padding=1), nn.ReLU(), nn.Conv2d(64, 64, 4, stride=2, padding=1), nn.ReLU(), ) self.fc_x = nn.Sequential( nn.Linear(1024, 256), nn.ReLU(), nn.Linear(256, z_dim), nn.ReLU(), ) self.fc_e = nn.Sequential( nn.Linear(e_dim, z_dim), nn.ReLU(), ) self.fc = nn.Linear(z_dim * 2, z_dim) def forward(self, x: Tensor, e: Tensor) -> Tensor: """Encodes z given x, e. Args: x (torch.Tensor): Observations, size `(b, c, h, w)`. e (torch.Tensor): Noises, size `(b, e)`. Returns: z (torch.Tensor): Encoded latents, size `(b, z)`. """ h_x = self.conv(x) h_x = h_x.view(-1, 1024) h_x = self.fc_x(h_x) h_e = self.fc_e(e) z = self.fc(torch.cat([h_x, h_e], dim=1)) return z class Decoder(nn.Module): """Decoder p(x|z). Args: in_channels (int): Channel size of inputs. z_dim (int): Dimension size of latents. """ def __init__(self, in_channels: int, z_dim: int) -> None: super().__init__() self.fc = nn.Sequential( nn.Linear(z_dim, 256), nn.ReLU(), nn.Linear(256, 1024), nn.ReLU(), ) self.deconv = nn.Sequential( nn.ConvTranspose2d(64, 64, 4, stride=2, padding=1), nn.ReLU(), nn.ConvTranspose2d(64, 32, 4, stride=2, padding=1), nn.ReLU(), nn.ConvTranspose2d(32, 32, 4, stride=2, padding=1), nn.ReLU(), nn.ConvTranspose2d(32, in_channels, 4, stride=2, padding=1), nn.Sigmoid(), ) def forward(self, z: Tensor) -> Tensor: """Encodes z given x. Args: z (torch.Tensor): Latents, size `(b, z)`. Returns: probs (torch.Tensor): Decoded observations, size `(b, c, h, w)`. """ h = self.fc(z) h = h.view(-1, 64, 4, 4) probs = self.deconv(h) return probs class Discriminator(nn.Module): """Discriminator T(x, z). Args: in_channels (int): Channel size of inputs. z_dim (int): Dimension size of latents. """ def __init__(self, in_channels: int, z_dim: int) -> None: super().__init__() self.disc_x = nn.Sequential( nn.Conv2d(in_channels, 32, 4, stride=2, padding=1), nn.LeakyReLU(), nn.Conv2d(32, 32, 4, stride=2, padding=1), nn.LeakyReLU(), nn.Conv2d(32, 64, 4, stride=2, padding=1), nn.LeakyReLU(), nn.Conv2d(64, 64, 4, stride=2, padding=1), nn.LeakyReLU(), ) self.fc_x = nn.Linear(1024, 256) self.disc_z = nn.Sequential( nn.Linear(z_dim, 512), nn.LeakyReLU(), nn.Linear(512, 512), nn.LeakyReLU(), nn.Linear(512, 256), nn.LeakyReLU(), ) self.fc = nn.Linear(512, 1) def forward(self, x: Tensor, z: Tensor) -> Tensor: """Discriminate p(x)p(z) from p(x)q(z|x). Args: x (torch.Tensor): Observations, size `(b, c, h, w)`. z (torch.Tensor): Latents, size `(b, z)`. 
Returns: logits (torch.Tensor): Logits, size `(b, 1)`. """ h_x = self.disc_x(x) h_x = self.fc_x(h_x.view(-1, 1024)) h_z = self.disc_z(z) logits = self.fc(torch.cat([h_x, h_z], dim=1)) return logits class AVB(BaseVAE): """Adversarial Variational Bayes. Args: in_channels (int, optional): Channel size of inputs. z_dim (int, optional): Dimension size of latents. e_dim (int, optional): Dimension size of noises. """ def __init__(self, in_channels: int = 3, z_dim: int = 10, e_dim: int = 10) -> None: super().__init__() self.z_dim = z_dim self.e_dim = e_dim self.encoder = Encoder(in_channels, z_dim, e_dim) self.decoder = Decoder(in_channels, z_dim) self.discriminator = Discriminator(in_channels, z_dim) self.bce_loss = nn.BCEWithLogitsLoss(reduction="none") self.p_mu: Tensor self.p_var: Tensor self.register_buffer("p_mu", torch.zeros(1, z_dim)) self.register_buffer("p_var", torch.ones(1, z_dim)) def inference( self, x: Tensor, y: Optional[Tensor] = None, beta: float = 1.0 ) -> Tuple[Tuple[Tensor, ...], Dict[str, Tensor]]: batch = x.size(0) e_mu = x.new_zeros((batch, self.e_dim)) e_var = x.new_ones((batch, self.e_dim)) e = e_mu + e_var ** 0.5 * torch.randn_like(e_var) z_mu = x.new_zeros((batch, self.z_dim)) z_var = x.new_ones((batch, self.z_dim)) z_p = z_mu + z_var ** 0.5 * torch.randn_like(z_var) z_q = self.encoder(x, e) recon = self.decoder(z_q) logits = self.discriminator(x, z_q) logits = beta * logits.sum(dim=1) ce_loss = nll_bernoulli(x, recon, reduce=False) ce_loss = ce_loss.sum(dim=[1, 2, 3]) log_d_q = self.bce_loss(self.discriminator(x, z_q.detach()), z_q.new_ones((batch, 1))) log_d_p = self.bce_loss(self.discriminator(x, z_p), z_p.new_zeros((batch, 1))) loss_d = (log_d_q + log_d_p).sum(dim=1) loss_dict = { "loss": logits + ce_loss, "ce_loss": ce_loss, "logits": logits, "loss_d": loss_d, } return (recon, z_q), loss_dict def sample(self, batch_size: int = 1, y: Optional[Tensor] = None) -> Tensor: mu = self.p_mu.repeat(batch_size, 1) var = self.p_var.repeat(batch_size, 1) z = mu + var ** 0.5 * torch.randn_like(var) x = self.decoder(z) return x def adversarial_parameters(self) -> Optional[Iterator]: return self.discriminator.parameters()
0
0
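
A hypothetical two-optimizer training step for the AVB class above; the optimizer choice, learning rates, batch size and the 64x64 input are assumptions (64x64 follows from the four stride-2 convolutions feeding a 1024-unit flatten, i.e. 64 channels x 4 x 4). The encoder/decoder descend "loss" while the discriminator descends "loss_d", matching the split that adversarial_parameters() exposes:

import torch
from torch import optim

model = AVB(in_channels=3, z_dim=10, e_dim=10)
vae_params = [p for n, p in model.named_parameters()
              if not n.startswith('discriminator')]
opt_vae = optim.Adam(vae_params, lr=1e-4)
opt_d = optim.Adam(model.adversarial_parameters(), lr=1e-4)

x = torch.rand(8, 3, 64, 64)
_, losses = model.inference(x)

opt_vae.zero_grad()
losses['loss'].mean().backward()
opt_vae.step()

opt_d.zero_grad()  # clears disc grads picked up through the logits term
losses['loss_d'].mean().backward()
opt_d.step()

The discriminator loss uses z_q.detach(), so its graph is independent of the VAE loss graph and no retain_graph is needed between the two backward passes.
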
5c9be3cb9a56ea1892323736b562a0547d2d754c
351
py
Python
tests/test_basic.py
maxclaey/httpx_auth
63803846a9d6bcc79c2daafd5ab240f4fc579f0f
[ "MIT" ]
58
2020-02-10T18:29:43.000Z
2022-03-24T06:38:23.000Z
tests/test_basic.py
maxclaey/httpx_auth
63803846a9d6bcc79c2daafd5ab240f4fc579f0f
[ "MIT" ]
40
2020-02-10T18:37:57.000Z
2022-02-16T21:05:45.000Z
tests/test_basic.py
maxclaey/httpx_auth
63803846a9d6bcc79c2daafd5ab240f4fc579f0f
[ "MIT" ]
8
2020-05-21T14:48:46.000Z
2022-01-30T11:18:43.000Z
from pytest_httpx import HTTPXMock import httpx_auth from tests.auth_helper import get_header def test_basic_authentication_send_authorization_header(httpx_mock: HTTPXMock): auth = httpx_auth.Basic("test_user", "test_pwd") assert ( get_header(httpx_mock, auth).get("Authorization") == "Basic dGVzdF91c2VyOnRlc3RfcHdk" )
27
79
0.760684
from pytest_httpx import HTTPXMock import httpx_auth from tests.auth_helper import get_header def test_basic_authentication_send_authorization_header(httpx_mock: HTTPXMock): auth = httpx_auth.Basic("test_user", "test_pwd") assert ( get_header(httpx_mock, auth).get("Authorization") == "Basic dGVzdF91c2VyOnRlc3RfcHdk" )
0
0
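
Where the expected constant in the test above comes from: RFC 7617 Basic auth is just base64 over "user:password", so the asserted string can be reproduced directly:

import base64

token = base64.b64encode(b"test_user:test_pwd").decode()
assert "Basic " + token == "Basic dGVzdF91c2VyOnRlc3RfcHdk"
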
989d84ba9d1966b892115c10525a944d80912f4e
4,884
py
Python
boards/apollo2_evb/examples/multi_boot_secure_sample/generate_secureboot_assets.py
wher0001/AmbiqSuiteSDK
e280cbde3e366509da6768ab95471782a05d2371
[ "BSD-3-Clause" ]
25
2019-09-26T18:30:40.000Z
2022-01-21T07:42:04.000Z
boards/apollo2_evb/examples/multi_boot_secure_sample/generate_secureboot_assets.py
vaxradius/AmbiqSuite-R2.4.2
0ffd4a67ec6b63512f56556c40fe6ee4ded1a569
[ "BSD-3-Clause" ]
23
2020-01-20T17:25:02.000Z
2021-11-16T21:06:42.000Z
boards/apollo2_evb/examples/multi_boot_secure_sample/generate_secureboot_assets.py
vaxradius/AmbiqSuite-R2.4.2
0ffd4a67ec6b63512f56556c40fe6ee4ded1a569
[ "BSD-3-Clause" ]
23
2020-04-04T18:35:35.000Z
2022-03-15T07:34:02.000Z
#!/usr/bin/env python3 import argparse import sys import os # This key table has to match the one in bootloader keyTbl = [0xDEADBEEF, 0xAAAAAAAA, 0x11111111, 0x00000000, 0xFFFFFFFF, 0x55555555, 0xA5A5A5A5, 0x66666666] #****************************************************************************** # # Main function # #****************************************************************************** def main(): # Read the binary file from the command line. with open(args.binfile, mode='rb') as binfile: clear_application= binfile.read() print('Loading Clear application {} bytes from {}...'.format(len(clear_application), args.binfile), flush=True) plaintext = pad_to_block_size(clear_application, 4) ivVal = word_from_bytes(os.urandom(4), 0) print("Initialization Vector") print(hex(ivVal)) application = encrypt_app(args.keyidxVal, plaintext, ivVal) trailer = sec_trailer(args.keyidxVal, plaintext, ivVal, int(args.protectionVal, 0)) print('Saving encrypted image {} bytes to {}...'.format(len(application), args.encimagefile), flush=True) with open(args.encimagefile, mode='wb') as encimagefile: encimagebytearray = bytearray(application) encimagefile.write(encimagebytearray) print('Saving security trailer {} bytes to {}...'.format(len(trailer), args.sectrailerfile), flush=True) with open(args.sectrailerfile, mode='wb') as sectrailerfile: trailerbytearray = bytearray(trailer) sectrailerfile.write(trailerbytearray) print('Done.') #****************************************************************************** # # Turn a 32-bit number into a series of bytes for transmission. # # This command will split a 32-bit integer into an array of bytes, ordered # LSB-first for transmission over the UART. # #****************************************************************************** def int_to_bytes(n): A = [n & 0xFF, (n >> 8) & 0xFF, (n >> 16) & 0xFF, (n >> 24) & 0xFF] return A #****************************************************************************** # # Extract a word from a byte array # #****************************************************************************** def word_from_bytes(B, n): return (B[n] + (B[n + 1] << 8) + (B[n + 2] << 16) + (B[n + 3] << 24)) #****************************************************************************** # # CRC function that matches the CRC used by the Apollo bootloader. 
# #****************************************************************************** poly32 = 0x1EDC6F41 def crc32(L): rem = 0 for b in L: rem = rem ^ (b << 24) for i in range(8): if rem & 0x80000000: rem = ((rem << 1) ^ poly32) else: rem = (rem << 1) rem = rem & 0xFFFFFFFF return rem def pad_to_block_size(text, block_size): text_length = len(text) amount_to_pad = block_size - (text_length % block_size) if amount_to_pad == 0: amount_to_pad = block_size for i in range(0, amount_to_pad, 1): text += bytes(chr(amount_to_pad), 'ascii') return text def encrypt_app(keyidx, clear_app, iv): key32 = keyTbl[keyidx] applen = len(clear_app) enc_app = [] for i in range(0, applen, 4): word = word_from_bytes(clear_app, i) word = (word ^ iv) ^ key32 iv = word enc_app.extend(int_to_bytes(word)) return enc_app def sec_trailer(keyidx, clear_app, iv, protection): key32 = keyTbl[keyidx] secTrailer = [] secTrailer.extend(int_to_bytes(keyidx)) secTrailer.extend(int_to_bytes(protection)) applen = len(clear_app) secTrailer.extend(int_to_bytes(applen)) crc = crc32(clear_app) sig = key32 ^ crc secTrailer.extend(int_to_bytes(sig)) secTrailer.extend(int_to_bytes(iv)) # Trailer Signature secTrailerSig = crc32(secTrailer) ^ key32 secTrailer.extend(int_to_bytes(secTrailerSig)) return secTrailer #****************************************************************************** # # Main program flow # #****************************************************************************** if __name__ == '__main__': parser = argparse.ArgumentParser(description = 'Secure Image generation utility for Apollo or Apollo2') parser.add_argument('binfile', help = 'Binary file to program into the target device') parser.add_argument('keyidxVal', default=0, type=int, help = 'encryption key index') parser.add_argument('protectionVal', default=0, help = 'Image Protection Value (hex)') parser.add_argument('encimagefile', help = 'Destination file for Encrypted image') parser.add_argument('sectrailerfile', help = 'Destination file for security trailer') args = parser.parse_args() main()
33.452055
115
0.546683
#!/usr/bin/env python3 import argparse import sys import os # This key table has to match the one in bootloader keyTbl = [0xDEADBEEF, 0xAAAAAAAA, 0x11111111, 0x00000000, 0xFFFFFFFF, 0x55555555, 0xA5A5A5A5, 0x66666666] #****************************************************************************** # # Main function # #****************************************************************************** def main(): # Read the binary file from the command line. with open(args.binfile, mode='rb') as binfile: clear_application= binfile.read() print('Loading Clear application {} bytes from {}...'.format(len(clear_application), args.binfile), flush=True) plaintext = pad_to_block_size(clear_application, 4) ivVal = word_from_bytes(os.urandom(4), 0) print("Initialization Vector") print(hex(ivVal)) application = encrypt_app(args.keyidxVal, plaintext, ivVal) trailer = sec_trailer(args.keyidxVal, plaintext, ivVal, int(args.protectionVal, 0)) print('Saving encrypted image {} bytes to {}...'.format(len(application), args.encimagefile), flush=True) with open(args.encimagefile, mode='wb') as encimagefile: encimagebytearray = bytearray(application) encimagefile.write(encimagebytearray) print('Saving security trailer {} bytes to {}...'.format(len(trailer), args.sectrailerfile), flush=True) with open(args.sectrailerfile, mode='wb') as sectrailerfile: trailerbytearray = bytearray(trailer) sectrailerfile.write(trailerbytearray) print('Done.') #****************************************************************************** # # Turn a 32-bit number into a series of bytes for transmission. # # This command will split a 32-bit integer into an array of bytes, ordered # LSB-first for transmission over the UART. # #****************************************************************************** def int_to_bytes(n): A = [n & 0xFF, (n >> 8) & 0xFF, (n >> 16) & 0xFF, (n >> 24) & 0xFF] return A #****************************************************************************** # # Extract a word from a byte array # #****************************************************************************** def word_from_bytes(B, n): return (B[n] + (B[n + 1] << 8) + (B[n + 2] << 16) + (B[n + 3] << 24)) #****************************************************************************** # # CRC function that matches the CRC used by the Apollo bootloader. 
# #****************************************************************************** poly32 = 0x1EDC6F41 def crc32(L): rem = 0 for b in L: rem = rem ^ (b << 24) for i in range(8): if rem & 0x80000000: rem = ((rem << 1) ^ poly32) else: rem = (rem << 1) rem = rem & 0xFFFFFFFF return rem def pad_to_block_size(text, block_size): text_length = len(text) amount_to_pad = block_size - (text_length % block_size) if amount_to_pad == 0: amount_to_pad = block_size for i in range(0, amount_to_pad, 1): text += bytes(chr(amount_to_pad), 'ascii') return text def encrypt_app(keyidx, clear_app, iv): key32 = keyTbl[keyidx] applen = len(clear_app) enc_app = [] for i in range(0, applen, 4): word = word_from_bytes(clear_app, i) word = (word ^ iv) ^ key32 iv = word enc_app.extend(int_to_bytes(word)) return enc_app def sec_trailer(keyidx, clear_app, iv, protection): key32 = keyTbl[keyidx] secTrailer = [] secTrailer.extend(int_to_bytes(keyidx)) secTrailer.extend(int_to_bytes(protection)) applen = len(clear_app) secTrailer.extend(int_to_bytes(applen)) crc = crc32(clear_app) sig = key32 ^ crc secTrailer.extend(int_to_bytes(sig)) secTrailer.extend(int_to_bytes(iv)) # Trailer Signature secTrailerSig = crc32(secTrailer) ^ key32 secTrailer.extend(int_to_bytes(secTrailerSig)) return secTrailer #****************************************************************************** # # Main program flow # #****************************************************************************** if __name__ == '__main__': parser = argparse.ArgumentParser(description = 'Secure Image generation utility for Apollo or Apollo2') parser.add_argument('binfile', help = 'Binary file to program into the target device') parser.add_argument('keyidxVal', default=0, type=int, help = 'encryption key index') parser.add_argument('protectionVal', default=0, help = 'Image Protection Value (hex)') parser.add_argument('encimagefile', help = 'Destination file for Encrypted image') parser.add_argument('sectrailerfile', help = 'Destination file for security trailer') args = parser.parse_args() main()
0
0
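
For reference, the inverse of encrypt_app(), reusing keyTbl, word_from_bytes and int_to_bytes from the script above; each ciphertext word becomes the next IV, so decryption XORs with the key and the previous ciphertext word. This is a sketch, not part of the shipped tool, and the scheme itself is plain XOR chaining, not an authenticated cipher:

def decrypt_app(keyidx, enc_app, iv):
    key32 = keyTbl[keyidx]
    clear_app = []
    for i in range(0, len(enc_app), 4):
        word = word_from_bytes(enc_app, i)
        clear_app.extend(int_to_bytes((word ^ key32) ^ iv))
        iv = word  # the ciphertext word chains into the next IV
    return clear_app
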
0496b97a9bf1fb8fa8146228a03e548726184666
1,303
py
Python
backend/api/views.py
trib3/django-vue-vuetify-toy
b73fe31acf989b63511bf1779695912257c88cf2
[ "MIT" ]
null
null
null
backend/api/views.py
trib3/django-vue-vuetify-toy
b73fe31acf989b63511bf1779695912257c88cf2
[ "MIT" ]
null
null
null
backend/api/views.py
trib3/django-vue-vuetify-toy
b73fe31acf989b63511bf1779695912257c88cf2
[ "MIT" ]
null
null
null
from django.views.generic import TemplateView from django.views.decorators.cache import never_cache from django.db.models import Count, Sum from django.db.models.functions import Coalesce from backend.api.models import Profile, ProfileDisplayFields, PostAggregateFields from django.http import JsonResponse from django.http import HttpRequest # Serve Vue Application index_view = never_cache(TemplateView.as_view(template_name="index.html")) def profiles(request: HttpRequest) -> JsonResponse: """ Data about profiles and their posts :param request: Request from the client :return: JsonResponse containing a list of dictionaries that represent profiles and their posts. EX: [ { "name": "lifeoftanyamarie", "thumbnail": "thumbnail.com", "followers": 90900, "post_count": 2, "likes": 4310 },... ] """ fields = [ display.value for display in [*ProfileDisplayFields, *PostAggregateFields] ] profiles_qs = ( Profile.objects.all() .annotate( post_count=Coalesce(Count("post"), 0), likes=Coalesce(Sum("post__likes"), 0), ) .values(*fields) ) return JsonResponse(list(profiles_qs), safe=False)
30.302326
82
0.653876
from django.views.generic import TemplateView from django.views.decorators.cache import never_cache from django.db.models import Count, Sum from django.db.models.functions import Coalesce from backend.api.models import Profile, ProfileDisplayFields, PostAggregateFields from django.http import JsonResponse from django.http import HttpRequest # Serve Vue Application index_view = never_cache(TemplateView.as_view(template_name="index.html")) def profiles(request: HttpRequest) -> JsonResponse: """ Data about profiles and their posts :param request: Request from the client :return: JsonResponse containing a list of dictionaries that represent profiles and their posts. EX: [ { "name": "lifeoftanyamarie", "thumbnail": "thumbnail.com", "followers": 90900, "post_count": 2, "likes": 4310 },... ] """ fields = [ display.value for display in [*ProfileDisplayFields, *PostAggregateFields] ] profiles_qs = ( Profile.objects.all() .annotate( post_count=Coalesce(Count("post"), 0), likes=Coalesce(Sum("post__likes"), 0), ) .values(*fields) ) return JsonResponse(list(profiles_qs), safe=False)
0
0
512587114336c35d6dc9c508ffa136085e46b053
2,295
py
Python
submission/id/models.py
simonprast/wopi-engine
b3f59782659c8be42f4064bce5281afd391833be
[ "BSD-Source-Code" ]
null
null
null
submission/id/models.py
simonprast/wopi-engine
b3f59782659c8be42f4064bce5281afd391833be
[ "BSD-Source-Code" ]
null
null
null
submission/id/models.py
simonprast/wopi-engine
b3f59782659c8be42f4064bce5281afd391833be
[ "BSD-Source-Code" ]
null
null
null
#
# Created on Wed Nov 18 2020
#
# Copyright (c) 2020 - Simon Prast
#


import os
import uuid

from django.conf import settings
from django.db import models

from user.models import User


class IDSubmissionManager(models.Manager):
    def create_submission(self, submitter, document, document_back):
        id_submission = IDSubmission(
            submitter=submitter,
            document=document,
            document_back=document_back
        )
        id_submission.save()
        return id_submission


def create_path(instance, filename):
    folder = 'ids/' + str(uuid.uuid4())
    os.makedirs(os.path.join(settings.MEDIA_ROOT, folder))
    return os.path.join(folder, filename)


class IDSubmission(models.Model):
    submitter = models.ForeignKey(
        User, on_delete=models.SET_NULL, blank=True, null=True
    )
    # Will be saved to settings.MEDIA_ROOT (francy.media) + /ids/
    document = models.ImageField(
        upload_to=create_path
    )
    document_back = models.ImageField(
        upload_to=create_path, blank=True, null=True
    )
    verified = models.BooleanField(default=False)
    denied = models.BooleanField(default=False)
    latest = models.BooleanField(default=True)

    objects = IDSubmissionManager()

    REQUIRED_FIELDS = []

    def save(self, *args, **kwargs):
        # Only one submission per user may carry latest=True at a time.
        IDSubmission.objects.filter(
            submitter=self.submitter, latest=True).update(latest=False)
        self.latest = True
        # Forward *args/**kwargs (e.g. update_fields) instead of dropping them.
        super(IDSubmission, self).save(*args, **kwargs)

    class Meta:
        verbose_name = 'ID Submission'

    def __str__(self):
        return 'Ausweis von ' + str(self.submitter) + \
               ' (verified: ' + str(self.verified) + \
               ', latest: ' + str(self.latest) + ')'


class IDToken(models.Model):
    user = models.ForeignKey(
        User, on_delete=models.CASCADE, null=True, blank=True
    )
    token = models.UUIDField(
        default=uuid.uuid4, null=True, blank=True
    )
    created_at = models.DateTimeField(
        auto_now_add=True, null=True, blank=True
    )
    called = models.BooleanField(
        default=False, null=True, blank=True
    )
    uploaded = models.BooleanField(
        default=False, null=True, blank=True
    )
    expired = models.BooleanField(
        default=False, null=True, blank=True
    )
25.786517
71
0.651852
# # Created on Wed Nov 18 2020 # # Copyright (c) 2020 - Simon Prast # import os import uuid from django.conf import settings from django.db import models from user.models import User class IDSubmissionManager(models.Manager): def create_submission(self, submitter, document, document_back): id_submission = IDSubmission( submitter=submitter, document=document, document_back=document_back ) id_submission.save() return id_submission def create_path(instance, filename): folder = 'ids/' + str(uuid.uuid4()) os.makedirs(os.path.join(settings.MEDIA_ROOT, folder)) return os.path.join(folder, filename) class IDSubmission(models.Model): submitter = models.ForeignKey( User, on_delete=models.SET_NULL, blank=True, null=True ) # Will be saved to settings.MEDIA_ROOT (francy.media) + /ids/ document = models.ImageField( upload_to=create_path ) document_back = models.ImageField( upload_to=create_path, blank=True, null=True ) verified = models.BooleanField(default=False) denied = models.BooleanField(default=False) latest = models.BooleanField(default=True) objects = IDSubmissionManager() REQUIRED_FIELDS = [] def save(self, *args, **kwargs): IDSubmission.objects.filter( submitter=self.submitter, latest=True).update(latest=False) self.latest = True super(IDSubmission, self).save() class Meta: verbose_name = 'ID Submission' def __str__(self): return 'Ausweis von ' + str(self.submitter) + \ ' (verified: ' + str(self.verified) + \ ', latest: ' + str(self.latest) + ')' class IDToken(models.Model): user = models.ForeignKey( User, on_delete=models.CASCADE, null=True, blank=True ) token = models.UUIDField( default=uuid.uuid4, null=True, blank=True ) created_at = models.DateTimeField( auto_now_add=True, null=True, blank=True ) called = models.BooleanField( default=False, null=True, blank=True ) uploaded = models.BooleanField( default=False, null=True, blank=True ) expired = models.BooleanField( default=False, null=True, blank=True )
0
0
54229d5bb24d7ad6c282137584e0947395e03605
418
py
Python
Session_01/py101/10_classes.py
weighanchor4414/DigitalWorldWorkshop2020
9eca3a789e5532680ab032c20fe892bdbd47b891
[ "MIT" ]
9
2020-06-05T17:01:23.000Z
2022-03-16T19:55:50.000Z
Session_01/py101/10_classes.py
weighanchor4414/DigitalWorldWorkshop2020
9eca3a789e5532680ab032c20fe892bdbd47b891
[ "MIT" ]
null
null
null
Session_01/py101/10_classes.py
weighanchor4414/DigitalWorldWorkshop2020
9eca3a789e5532680ab032c20fe892bdbd47b891
[ "MIT" ]
2
2020-02-20T16:48:35.000Z
2020-03-18T14:36:04.000Z
class People:
    def __init__(self, name, birthYear):
        self.name = name
        self.birthYear = birthYear
        self.age = 2020 - birthYear
        self.height = None
        self.pillar = None


p1 = People("Maria", 1999)
print(p1.name)
print(p1.birthYear)
print(p1.age)

p1.pillar = "Architecture and Sustainable Design (ASD)"
print(f"{p1.name} is {p1.age} years old, and she is majoring in {p1.pillar}")
23.222222
76
0.645933
class People: def __init__(self, name, birthYear): self.name = name self.birthYear = birthYear self.age = 2020 - birthYear self.height = None self.pillar = None p1 = People("Maria", 1999) print(p1.name) print(p1.birthYear) print(p1.age) p1.pillar = "Architecture and Sustainable Design (ASD)" print(f"{p1.name} is {p1.age} years old, and she is majored in {p1.pillar}")
0
0
1f913af634f48374288edbe27e053cffc84d41af
4,845
py
Python
tests/functional/commands/test_list_command.py
aimar1986bupt/orion
6d217af1f9002aa671f8a3260a687c540ca5336d
[ "BSD-3-Clause" ]
4
2019-09-02T19:41:04.000Z
2020-04-07T13:05:47.000Z
tests/functional/commands/test_list_command.py
aimar1986bupt/orion
6d217af1f9002aa671f8a3260a687c540ca5336d
[ "BSD-3-Clause" ]
2
2018-06-26T19:17:09.000Z
2022-02-23T13:40:04.000Z
tests/functional/commands/test_list_command.py
aimar1986bupt/orion
6d217af1f9002aa671f8a3260a687c540ca5336d
[ "BSD-3-Clause" ]
2
2019-08-26T11:36:47.000Z
2020-04-07T13:05:48.000Z
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Perform a functional test of the list command."""
import os

import orion.core.cli


def test_no_exp(monkeypatch, clean_db, capsys):
    """Test that nothing is printed when there are no experiments."""
    monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == ""


def test_single_exp(clean_db, one_experiment, capsys):
    """Test that the name of the experiment is printed when there is one experiment."""
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == " test_single_exp-v1\n"


def test_no_version_backward_compatible(clean_db, one_experiment_no_version, capsys):
    """Test status with no experiments."""
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == " test_single_exp-no-version-v1\n"


def test_broken_refers(clean_db, broken_refers, capsys):
    """Test that experiment without refers dict can be handled properly."""
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == " test_single_exp-v1\n"


def test_two_exp(capsys, clean_db, two_experiments):
    """Test that experiment and child are printed."""
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == """\
 test_double_exp-v1┐
                   └test_double_exp_child-v1
"""


def test_three_exp(capsys, clean_db, three_experiments):
    """Test that experiment, child and grand-child are printed."""
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == """\
 test_double_exp-v1┐
                   └test_double_exp_child-v1
 test_single_exp-v1
"""


def test_no_exp_name(clean_db, three_experiments, monkeypatch, capsys):
    """Test that nothing is printed when there are no experiments with a given name."""
    monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
    orion.core.cli.main(['list', '--name', 'I don\'t exist'])

    captured = capsys.readouterr().out

    assert captured == ""


def test_exp_name(clean_db, three_experiments, monkeypatch, capsys):
    """Test that only the specified experiment is printed."""
    monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
    orion.core.cli.main(['list', '--name', 'test_single_exp'])

    captured = capsys.readouterr().out

    assert captured == " test_single_exp-v1\n"


def test_exp_name_with_child(clean_db, three_experiments, monkeypatch, capsys):
    """Test that only the specified experiment is printed, and with its child."""
    monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
    orion.core.cli.main(['list', '--name', 'test_double_exp'])

    captured = capsys.readouterr().out

    assert captured == """\
 test_double_exp-v1┐
                   └test_double_exp_child-v1
"""


def test_exp_name_child(clean_db, three_experiments, monkeypatch, capsys):
    """Test that only the specified child experiment is printed."""
    monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
    orion.core.cli.main(['list', '--name', 'test_double_exp_child'])

    captured = capsys.readouterr().out

    assert captured == " test_double_exp_child-v1\n"


def test_exp_same_name(clean_db, two_experiments_same_name, monkeypatch, capsys):
    """Test that two experiments with the same name and different versions are
    correctly printed."""
    monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == """\
 test_single_exp-v1┐
                   └test_single_exp-v2
"""


def test_exp_family_same_name(clean_db, three_experiments_family_same_name, monkeypatch,
                              capsys):
    """Test that two experiments with the same name and different versions are
    correctly printed even when one of them has a child.
    """
    monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == """\
                   ┌test_single_exp-v2
 test_single_exp-v1┤
                   └test_single_exp_child-v1
"""


def test_exp_family_branch_same_name(clean_db, three_experiments_branch_same_name, monkeypatch,
                                     capsys):
    """Test that two experiments with the same name and different versions are
    correctly printed even when last one has a child.
    """
    monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__)))
    orion.core.cli.main(['list'])

    captured = capsys.readouterr().out

    assert captured == """\
 test_single_exp-v1┐
                   └test_single_exp-v2┐
                                      └test_single_exp_child-v1
"""
30.664557
100
0.685449
#!/usr/bin/env python # -*- coding: utf-8 -*- """Perform a functional test of the list command.""" import os import orion.core.cli def test_no_exp(monkeypatch, clean_db, capsys): """Test that nothing is printed when there are no experiments.""" monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__))) orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == "" def test_single_exp(clean_db, one_experiment, capsys): """Test that the name of the experiment is printed when there is one experiment.""" orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == " test_single_exp-v1\n" def test_no_version_backward_compatible(clean_db, one_experiment_no_version, capsys): """Test that an experiment saved without a version is still listed (backward compatibility).""" orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == " test_single_exp-no-version-v1\n" def test_broken_refers(clean_db, broken_refers, capsys): """Test that an experiment without a refers dict can be handled properly.""" orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == " test_single_exp-v1\n" def test_two_exp(capsys, clean_db, two_experiments): """Test that an experiment and its child are printed.""" orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == """\ test_double_exp-v1┐ └test_double_exp_child-v1 """ def test_three_exp(capsys, clean_db, three_experiments): """Test that a parent, its child and an independent experiment are all printed.""" orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == """\ test_double_exp-v1┐ └test_double_exp_child-v1 test_single_exp-v1 """ def test_no_exp_name(clean_db, three_experiments, monkeypatch, capsys): """Test that nothing is printed when there are no experiments with a given name.""" monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__))) orion.core.cli.main(['list', '--name', 'I don\'t exist']) captured = capsys.readouterr().out assert captured == "" def test_exp_name(clean_db, three_experiments, monkeypatch, capsys): """Test that only the specified experiment is printed.""" monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__))) orion.core.cli.main(['list', '--name', 'test_single_exp']) captured = capsys.readouterr().out assert captured == " test_single_exp-v1\n" def test_exp_name_with_child(clean_db, three_experiments, monkeypatch, capsys): """Test that only the specified experiment is printed, and with its child.""" monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__))) orion.core.cli.main(['list', '--name', 'test_double_exp']) captured = capsys.readouterr().out assert captured == """\ test_double_exp-v1┐ └test_double_exp_child-v1 """ def test_exp_name_child(clean_db, three_experiments, monkeypatch, capsys): """Test that only the specified child experiment is printed.""" monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__))) orion.core.cli.main(['list', '--name', 'test_double_exp_child']) captured = capsys.readouterr().out assert captured == " test_double_exp_child-v1\n" def test_exp_same_name(clean_db, two_experiments_same_name, monkeypatch, capsys): """Test that two experiments with the same name and different versions are correctly printed.""" monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__))) orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == """\ test_single_exp-v1┐ └test_single_exp-v2 """ def test_exp_family_same_name(clean_db, three_experiments_family_same_name, monkeypatch, capsys): """Test that two experiments with the same name and different versions are correctly printed even when one of them has a child. """ monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__))) orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == """\ ┌test_single_exp-v2 test_single_exp-v1┤ └test_single_exp_child-v1 """ def test_exp_family_branch_same_name(clean_db, three_experiments_branch_same_name, monkeypatch, capsys): """Test that two experiments with the same name and different versions are correctly printed even when the last one has a child. """ monkeypatch.chdir(os.path.dirname(os.path.abspath(__file__))) orion.core.cli.main(['list']) captured = capsys.readouterr().out assert captured == """\ test_single_exp-v1┐ └test_single_exp-v2┐ └test_single_exp_child-v1 """
45
0
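Every test in the record above follows one pattern: invoke the CLI entry point, then compare the stdout captured by pytest's capsys fixture against an expected string. A minimal self-contained sketch of that pattern, with a hypothetical greet() function standing in for orion.core.cli.main (the clean_db and one_experiment fixtures are project-specific and assumed to prepare database state):

def greet(name):
    print("Hello, {}!".format(name))

def test_greet(capsys):
    greet("world")
    # readouterr() returns everything written to stdout/stderr since the
    # last call; .out is the stdout portion.
    captured = capsys.readouterr().out
    assert captured == "Hello, world!\n"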
674848ee2ebcca4d5c540f04215985c5a2e2f772
3,641
py
Python
students/k3343/kursoviks/Artamonova__Valeriya/server/school/serializers.py
TonikX/ITMO_ICT_-WebProgramming_2020
ba566c1b3ab04585665c69860b713741906935a0
[ "MIT" ]
10
2020-03-20T09:06:12.000Z
2021-07-27T13:06:02.000Z
students/k3343/kursoviks/Artamonova__Valeriya/server/school/serializers.py
TonikX/ITMO_ICT_-WebProgramming_2020
ba566c1b3ab04585665c69860b713741906935a0
[ "MIT" ]
134
2020-03-23T09:47:48.000Z
2022-03-12T01:05:19.000Z
students/k3343/kursoviks/Artamonova__Valeriya/server/school/serializers.py
TonikX/ITMO_ICT_-WebProgramming_2020
ba566c1b3ab04585665c69860b713741906935a0
[ "MIT" ]
71
2020-03-20T12:45:56.000Z
2021-10-31T19:22:25.000Z
from rest_framework import serializers from .models import Teacher,Timetable,Klass,Pupil,Cabinet,Subject, Grade class TeacherSerializer(serializers.ModelSerializer): """List of teachers""" class Meta: model = Teacher fields = ("id", "last_name", "first_name", "second_name", "teaching_period") class TeacherAddSerializer(serializers.ModelSerializer): """Add a teacher""" class Meta: model = Teacher fields = "__all__" class PupilSerializer(serializers.ModelSerializer): """List of pupils""" class Meta: model = Pupil fields = ("id", "last_name", "first_name", "second_name") class GradeCreateSerializer(serializers.ModelSerializer): """Add a grade""" class Meta: model = Grade fields = "__all__" class GradeSerializer(serializers.ModelSerializer): """Display grades""" subject = serializers.SlugRelatedField(slug_field="subject", read_only=True) class Meta: model = Grade fields = "__all__" class PupilDetailSerializer(serializers.ModelSerializer): """Pupil profile""" klass = serializers.SlugRelatedField(slug_field = "number", read_only=True) grades = GradeSerializer(many=True) class Meta: model = Pupil fields = "__all__" class PupilAddSerializer(serializers.ModelSerializer): """Add a pupil""" class Meta: model = Pupil fields = "__all__" class TimetableAddSerializer(serializers.ModelSerializer): """Add a timetable entry""" class Meta: model = Timetable fields = "__all__" class TimetableSerializer(serializers.ModelSerializer): """Display the timetable""" subject_name = serializers.SlugRelatedField(slug_field="subject", read_only=True) cabinet_number = serializers.SlugRelatedField(slug_field="number", read_only=True) teacher_name = serializers.SlugRelatedField(slug_field="last_name", read_only=True) klass_name = serializers.SlugRelatedField(slug_field="number", read_only=True) class Meta: model = Timetable fields = "__all__" class KlassSerializer(serializers.ModelSerializer): """List of classes""" teacher = serializers.SlugRelatedField(slug_field="last_name", read_only=True) class Meta: model = Klass fields = "__all__" class KlassAddSerializer(serializers.ModelSerializer): """Add a class""" class Meta: model = Klass fields = "__all__" class KlassDetailSerializer(serializers.ModelSerializer): """Class description""" teacher = serializers.SlugRelatedField(slug_field="last_name", read_only=True) pupils = PupilSerializer(many=True) timetable = TimetableSerializer(many=True) class Meta: model = Klass fields = "__all__" class SubjectSerializer(serializers.ModelSerializer): """List of subjects""" class Meta: model = Subject fields = "__all__" class CabinetSerializer(serializers.ModelSerializer): """List of classrooms""" teacher = serializers.SlugRelatedField(slug_field="last_name", read_only=True) class Meta: model = Cabinet fields = "__all__" class TeacherDetailSerializer(serializers.ModelSerializer): """Teacher profile""" subject = serializers.SlugRelatedField(slug_field="subject", read_only=True) klass = KlassSerializer(many=True) cabinet = CabinetSerializer(many=True) class Meta: model = Teacher fields = "__all__"
28.669291
88
0.67042
from rest_framework import serializers from .models import Teacher,Timetable,Klass,Pupil,Cabinet,Subject, Grade class TeacherSerializer(serializers.ModelSerializer): """List of teachers""" class Meta: model = Teacher fields = ("id", "last_name", "first_name", "second_name", "teaching_period") class TeacherAddSerializer(serializers.ModelSerializer): """Add a teacher""" class Meta: model = Teacher fields = "__all__" class PupilSerializer(serializers.ModelSerializer): """List of pupils""" class Meta: model = Pupil fields = ("id", "last_name", "first_name", "second_name") class GradeCreateSerializer(serializers.ModelSerializer): """Add a grade""" class Meta: model = Grade fields = "__all__" class GradeSerializer(serializers.ModelSerializer): """Display grades""" subject = serializers.SlugRelatedField(slug_field="subject", read_only=True) class Meta: model = Grade fields = "__all__" class PupilDetailSerializer(serializers.ModelSerializer): """Pupil profile""" klass = serializers.SlugRelatedField(slug_field = "number", read_only=True) grades = GradeSerializer(many=True) class Meta: model = Pupil fields = "__all__" class PupilAddSerializer(serializers.ModelSerializer): """Add a pupil""" class Meta: model = Pupil fields = "__all__" class TimetableAddSerializer(serializers.ModelSerializer): """Add a timetable entry""" class Meta: model = Timetable fields = "__all__" class TimetableSerializer(serializers.ModelSerializer): """Display the timetable""" subject_name = serializers.SlugRelatedField(slug_field="subject", read_only=True) cabinet_number = serializers.SlugRelatedField(slug_field="number", read_only=True) teacher_name = serializers.SlugRelatedField(slug_field="last_name", read_only=True) klass_name = serializers.SlugRelatedField(slug_field="number", read_only=True) class Meta: model = Timetable fields = "__all__" class KlassSerializer(serializers.ModelSerializer): """List of classes""" teacher = serializers.SlugRelatedField(slug_field="last_name", read_only=True) class Meta: model = Klass fields = "__all__" class KlassAddSerializer(serializers.ModelSerializer): """Add a class""" class Meta: model = Klass fields = "__all__" class KlassDetailSerializer(serializers.ModelSerializer): """Class description""" teacher = serializers.SlugRelatedField(slug_field="last_name", read_only=True) pupils = PupilSerializer(many=True) timetable = TimetableSerializer(many=True) class Meta: model = Klass fields = "__all__" class SubjectSerializer(serializers.ModelSerializer): """List of subjects""" class Meta: model = Subject fields = "__all__" class CabinetSerializer(serializers.ModelSerializer): """List of classrooms""" teacher = serializers.SlugRelatedField(slug_field="last_name", read_only=True) class Meta: model = Cabinet fields = "__all__" class TeacherDetailSerializer(serializers.ModelSerializer): """Teacher profile""" subject = serializers.SlugRelatedField(slug_field="subject", read_only=True) klass = KlassSerializer(many=True) cabinet = CabinetSerializer(many=True) class Meta: model = Teacher fields = "__all__"
442
0
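The serializers in the record above lean heavily on SlugRelatedField, which renders a related object as one human-readable field instead of its primary key. A minimal sketch of the idea, using hypothetical Author/Book models (not part of this project) inside a configured Django app:

from django.db import models
from rest_framework import serializers

class Author(models.Model):
    last_name = models.CharField(max_length=100)

class Book(models.Model):
    title = models.CharField(max_length=100)
    author = models.ForeignKey(Author, on_delete=models.CASCADE)

class BookSerializer(serializers.ModelSerializer):
    # Serialize the related Author as its last_name, read-only,
    # just as the Timetable/Klass serializers above do.
    author = serializers.SlugRelatedField(slug_field="last_name", read_only=True)

    class Meta:
        model = Book
        fields = "__all__"

With this, a serialized book looks like {"id": 1, "title": "...", "author": "Ivanova"} instead of exposing the author's integer pk.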
94f35cf52b9ed17eeeb19e3cea45eb8e5993057c
6,441
py
Python
video_test.py
SteveSZF/Traffic-Lane-Detection
8217808178cdf2d655d02632eb71c543d39f5258
[ "MIT" ]
2
2019-10-08T08:52:43.000Z
2019-10-08T08:55:37.000Z
video_test.py
SteveSZF/Traffic-Lane-Detection
8217808178cdf2d655d02632eb71c543d39f5258
[ "MIT" ]
null
null
null
video_test.py
SteveSZF/Traffic-Lane-Detection
8217808178cdf2d655d02632eb71c543d39f5258
[ "MIT" ]
null
null
null
import argparse import os import numpy as np from tqdm import tqdm from mypath import Path from dataloaders import make_data_loader from modeling.sync_batchnorm.replicate import patch_replication_callback from modeling.erfnet_road import * from utils.loss import SegmentationLosses from utils.calculate_weights import calculate_weigths_labels from utils.lr_scheduler import LR_Scheduler from utils.saver import Saver from utils.summaries import TensorboardSummary from utils.metrics import Evaluator from utils.LossWithUncertainty import LossWithUncertainty from dataloaders.utils import decode_segmap class Test(object): def __init__(self, args): self.args = args # Define Dataloader kwargs = {'num_workers': args.workers, 'pin_memory': True} self.test_loader, self.nclass_pixel, self.nclass_scene = make_data_loader(args, **kwargs) # Define network if args.checkname == 'erfnet': model = ERFNet(num_classes_pixel = self.nclass_pixel, num_classes_scene = self.nclass_scene,multitask = self.args.multitask) elif args.checkname == 'resnet': model = DeepLab(num_classes=self.nclass_pixel, backbone = 'resnet', output_stride=16) elif args.checkname == 'mobilenet': model = DeepLab(num_classes=self.nclass_pixel, backbone = 'mobilenet', output_stride=16) self.model = model # Using cuda if args.cuda: self.model = torch.nn.DataParallel(self.model, device_ids=self.args.gpu_ids) patch_replication_callback(self.model) self.model = self.model.cuda() # Resuming checkpoint self.best_pred = 0.0 if args.resume is not None: if not os.path.isfile(args.resume): raise RuntimeError("=> no checkpoint found at '{}'" .format(args.resume)) checkpoint = torch.load(args.resume) args.start_epoch = checkpoint['epoch'] if args.cuda: self.model.module.load_state_dict(checkpoint['state_dict']) else: self.model.load_state_dict(checkpoint['state_dict']) self.best_pred = checkpoint['best_pred'] print("=> loaded checkpoint '{}' (epoch {})" .format(args.resume, checkpoint['epoch'])) def write_test(self): self.model.eval() #self.evaluator.reset() tbar = tqdm(self.test_loader, desc='\r') saved_index = 0 for i, sample in enumerate(tbar): image = sample['image'] if self.args.cuda: image = image.cuda() with torch.no_grad(): output, output_road = self.model(image) if output_road != None: pass label_masks = torch.max(output, 1)[1].detach().cpu().numpy() image = image.detach().cpu().numpy().transpose(0, 2, 3, 1) #image = image.detach().cpu().numpy() #targets = target.detach().cpu().numpy() #print(targets.shape) for idx, label_mask in enumerate(label_masks): decode_segmap(label_mask, dataset=self.args.dataset, saved_path = self.args.saved_path + "/%(idx)05d.png" % {'idx':saved_index}, image = image[idx]) saved_index += 1 def main(): parser = argparse.ArgumentParser(description="PyTorch Lane Detection") parser.add_argument('--dataset', type=str, default='bdd100k', choices=['bdd100k'], help='dataset name (default: bdd100k)') parser.add_argument('--workers', type=int, default=4, metavar='N', help='dataloader threads') parser.add_argument('--base-w', type=int, default=960, help='base image width') parser.add_argument('--base-h', type=int, default=640, help='base image height') parser.add_argument('--crop-w', type=int, default=640, help='crop image width') parser.add_argument('--crop-h', type=int, default=480, help='crop image height') parser.add_argument('--output-w', type=int, default=640, help='output image width') parser.add_argument('--output-h', type=int, default=480, help='output image height') parser.add_argument('--multitask', type=bool, 
default=False, help='whether to do multi-task (default: auto)') parser.add_argument('--batch-size', type=int, default=None, metavar='N', help='input batch size for \ test (default: auto)') parser.add_argument('--no-cuda', action='store_true', default= False, help='disables CUDA training') parser.add_argument('--gpu-ids', type=str, default='0', help='use which gpu to train, must be a \ comma-separated list of integers only (default=0)') parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default: 1)') # checking point parser.add_argument('--checkname', type=str, default=None, help='set the checkpoint name') parser.add_argument('--resume', type=str, default=None, help='put the path to resuming file if needed') parser.add_argument('--write-val', action='store_true', default=False, help='store val rgb results') parser.add_argument('--video', type=str, default=None, help='video segmentation only for write-val') parser.add_argument('--saved-path', type=str, default=None, help='path for saving segmentation result') args = parser.parse_args() if not os.path.exists(args.saved_path): os.makedirs(args.saved_path) args.cuda = not args.no_cuda and torch.cuda.is_available() if args.cuda: try: args.gpu_ids = [int(s) for s in args.gpu_ids.split(',')] except ValueError: raise ValueError('Argument --gpu_ids must be a comma-separated list of integers only') if args.batch_size is None: args.batch_size = 4 * len(args.gpu_ids) print(args) torch.manual_seed(args.seed) tester = Test(args) tester.write_test() if __name__ == "__main__": main()
43.816327
164
0.604409
import argparse import os import numpy as np from tqdm import tqdm from mypath import Path from dataloaders import make_data_loader from modeling.sync_batchnorm.replicate import patch_replication_callback from modeling.erfnet_road import * from utils.loss import SegmentationLosses from utils.calculate_weights import calculate_weigths_labels from utils.lr_scheduler import LR_Scheduler from utils.saver import Saver from utils.summaries import TensorboardSummary from utils.metrics import Evaluator from utils.LossWithUncertainty import LossWithUncertainty from dataloaders.utils import decode_segmap class Test(object): def __init__(self, args): self.args = args # Define Dataloader kwargs = {'num_workers': args.workers, 'pin_memory': True} self.test_loader, self.nclass_pixel, self.nclass_scene = make_data_loader(args, **kwargs) # Define network if args.checkname == 'erfnet': model = ERFNet(num_classes_pixel = self.nclass_pixel, num_classes_scene = self.nclass_scene,multitask = self.args.multitask) elif args.checkname == 'resnet': model = DeepLab(num_classes=self.nclass_pixel, backbone = 'resnet', output_stride=16) elif args.checkname == 'mobilenet': model = DeepLab(num_classes=self.nclass_pixel, backbone = 'mobilenet', output_stride=16) self.model = model # Using cuda if args.cuda: self.model = torch.nn.DataParallel(self.model, device_ids=self.args.gpu_ids) patch_replication_callback(self.model) self.model = self.model.cuda() # Resuming checkpoint self.best_pred = 0.0 if args.resume is not None: if not os.path.isfile(args.resume): raise RuntimeError("=> no checkpoint found at '{}'" .format(args.resume)) checkpoint = torch.load(args.resume) args.start_epoch = checkpoint['epoch'] if args.cuda: self.model.module.load_state_dict(checkpoint['state_dict']) else: self.model.load_state_dict(checkpoint['state_dict']) self.best_pred = checkpoint['best_pred'] print("=> loaded checkpoint '{}' (epoch {})" .format(args.resume, checkpoint['epoch'])) def write_test(self): self.model.eval() #self.evaluator.reset() tbar = tqdm(self.test_loader, desc='\r') saved_index = 0 for i, sample in enumerate(tbar): image = sample['image'] if self.args.cuda: image = image.cuda() with torch.no_grad(): output, output_road = self.model(image) if output_road != None: pass label_masks = torch.max(output, 1)[1].detach().cpu().numpy() image = image.detach().cpu().numpy().transpose(0, 2, 3, 1) #image = image.detach().cpu().numpy() #targets = target.detach().cpu().numpy() #print(targets.shape) for idx, label_mask in enumerate(label_masks): decode_segmap(label_mask, dataset=self.args.dataset, saved_path = self.args.saved_path + "/%(idx)05d.png" % {'idx':saved_index}, image = image[idx]) saved_index += 1 def main(): parser = argparse.ArgumentParser(description="PyTorch Lane Detection") parser.add_argument('--dataset', type=str, default='bdd100k', choices=['bdd100k'], help='dataset name (default: bdd100k)') parser.add_argument('--workers', type=int, default=4, metavar='N', help='dataloader threads') parser.add_argument('--base-w', type=int, default=960, help='base image width') parser.add_argument('--base-h', type=int, default=640, help='base image height') parser.add_argument('--crop-w', type=int, default=640, help='crop image width') parser.add_argument('--crop-h', type=int, default=480, help='crop image height') parser.add_argument('--output-w', type=int, default=640, help='output image width') parser.add_argument('--output-h', type=int, default=480, help='output image height') parser.add_argument('--multitask', type=bool, 
default=False, help='whether to do multi-task (default: auto)') parser.add_argument('--batch-size', type=int, default=None, metavar='N', help='input batch size for \ test (default: auto)') parser.add_argument('--no-cuda', action='store_true', default= False, help='disables CUDA training') parser.add_argument('--gpu-ids', type=str, default='0', help='use which gpu to train, must be a \ comma-separated list of integers only (default=0)') parser.add_argument('--seed', type=int, default=1, metavar='S', help='random seed (default: 1)') # checking point parser.add_argument('--checkname', type=str, default=None, help='set the checkpoint name') parser.add_argument('--resume', type=str, default=None, help='put the path to resuming file if needed') parser.add_argument('--write-val', action='store_true', default=False, help='store val rgb results') parser.add_argument('--video', type=str, default=None, help='video segmentation only for write-val') parser.add_argument('--saved-path', type=str, default=None, help='path for saving segmentation result') args = parser.parse_args() if not os.path.exists(args.saved_path): os.makedirs(args.saved_path) args.cuda = not args.no_cuda and torch.cuda.is_available() if args.cuda: try: args.gpu_ids = [int(s) for s in args.gpu_ids.split(',')] except ValueError: raise ValueError('Argument --gpu_ids must be a comma-separated list of integers only') if args.batch_size is None: args.batch_size = 4 * len(args.gpu_ids) print(args) torch.manual_seed(args.seed) tester = Test(args) tester.write_test() if __name__ == "__main__": main()
0
0
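In write_test above, per-pixel class labels are obtained with torch.max(output, 1)[1], i.e. an argmax over the channel dimension of the network output. A small standalone illustration with random logits standing in for a real model:

import torch

# Fake segmentation output: batch of 2, 4 classes, 3x3 spatial grid.
output = torch.randn(2, 4, 3, 3)

# torch.max returns (values, indices); index [1] is the argmax,
# equivalent to torch.argmax(output, dim=1).
label_masks = torch.max(output, 1)[1]

print(label_masks.shape)  # torch.Size([2, 3, 3])
print(label_masks.dtype)  # torch.int64 -- one class index per pixel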
b72f0aa5b11153c3e11b4251b59096cbdc84677d
128
py
Python
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_0/_pkg0_1_0_0/_pkg0_1_0_0_0/_mod0_1_0_0_0_3.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_0/_pkg0_1_0_0/_pkg0_1_0_0_0/_mod0_1_0_0_0_3.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/completion/heavyStarPropagation/lib/_pkg0/_pkg0_1/_pkg0_1_0/_pkg0_1_0_0/_pkg0_1_0_0_0/_mod0_1_0_0_0_3.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
name0_1_0_0_0_3_0 = None name0_1_0_0_0_3_1 = None name0_1_0_0_0_3_2 = None name0_1_0_0_0_3_3 = None name0_1_0_0_0_3_4 = None
14.222222
24
0.820313
name0_1_0_0_0_3_0 = None name0_1_0_0_0_3_1 = None name0_1_0_0_0_3_2 = None name0_1_0_0_0_3_3 = None name0_1_0_0_0_3_4 = None
0
0
5bc3308252b8c656c9f2d85675cb4f58fd8d48c6
1,440
py
Python
covertCSVtoData.py
kobe41999/ASUS_ECG
0e20ccc92ade8130fe4a8ace3c6ef2e910631376
[ "MIT" ]
null
null
null
covertCSVtoData.py
kobe41999/ASUS_ECG
0e20ccc92ade8130fe4a8ace3c6ef2e910631376
[ "MIT" ]
null
null
null
covertCSVtoData.py
kobe41999/ASUS_ECG
0e20ccc92ade8130fe4a8ace3c6ef2e910631376
[ "MIT" ]
null
null
null
import csv import config as C import pandas as pd from sklearn import preprocessing import numpy as np def changeToList(data): dataList = [] first = data[0].replace("['", "") dataList.append(first) for i in range(len(data) - 3): dataList.append(data[i + 1]) last = data[len(data) - 1].replace("']", "") dataList.append(last) return dataList if __name__ == '__main__': df = pd.read_csv('./JsonToCSV/data0126.csv') ecgList = [] recordLen = 10000 for i in range(len(df.ECG)): ecgList.append(changeToList(df.ECG[i].split(" "))) for j in range(len(ecgList)): if recordLen > len(ecgList[j]): recordLen = len(ecgList[j]) numOfRow = [] for k in range(recordLen - 1): numOfRow.append(k) with open('try0126.csv', 'w', newline='') as csvFile: writer = csv.writer(csvFile) writer.writerow(numOfRow) for j in range(len(ecgList)): # Standardization # Min_Max_Scaler = preprocessing.MinMaxScaler(feature_range=(-5, 5)) # set the lower/upper bounds of the scaling range # MinMax_Data = Min_Max_Scaler.fit_transform(ecgList[j]) # Data is the raw input # # npa = np.asarray(ecgList[j], dtype=np.float32) # # norm = np.linalg.norm(npa) # # normal_array = npa / norm X = preprocessing.scale(ecgList[j]) final = np.round(X, 4) writer.writerow(final[0:(recordLen - 1)])
27.692308
94
0.584722
import csv import config as C import pandas as pd from sklearn import preprocessing import numpy as np def changeToList(data): dataList = [] first = data[0].replace("['", "") dataList.append(first) for i in range(len(data) - 3): dataList.append(data[i + 1]) last = data[len(data) - 1].replace("']", "") dataList.append(last) return dataList if __name__ == '__main__': df = pd.read_csv('./JsonToCSV/data0126.csv') ecgList = [] recordLen = 10000 for i in range(len(df.ECG)): ecgList.append(changeToList(df.ECG[i].split(" "))) for j in range(len(ecgList)): if recordLen > len(ecgList[j]): recordLen = len(ecgList[j]) numOfRow = [] for k in range(recordLen - 1): numOfRow.append(k) with open('try0126.csv', 'w', newline='') as csvFile: writer = csv.writer(csvFile) writer.writerow(numOfRow) for j in range(len(ecgList)): # Standardization # Min_Max_Scaler = preprocessing.MinMaxScaler(feature_range=(-5, 5)) # set the lower/upper bounds of the scaling range # MinMax_Data = Min_Max_Scaler.fit_transform(ecgList[j]) # Data is the raw input # # npa = np.asarray(ecgList[j], dtype=np.float32) # # norm = np.linalg.norm(npa) # # normal_array = npa / norm X = preprocessing.scale(ecgList[j]) final = np.round(X, 4) writer.writerow(final[0:(recordLen - 1)])
60
0
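The loop above standardizes each ECG record with preprocessing.scale, which removes the mean and divides by the standard deviation (here over the single 1-D record) before the values are rounded to four decimals. A short illustration with made-up numbers:

import numpy as np
from sklearn import preprocessing

record = [1.0, 2.0, 3.0, 4.0, 5.0]
X = preprocessing.scale(record)  # zero mean, unit variance

print(np.round(X, 4))  # [-1.4142 -0.7071  0.      0.7071  1.4142]
print(X.mean(), X.std())  # ~0.0 and ~1.0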
bf32004509e5d4fd9afea11cdf392904b73b5824
1,469
py
Python
python/classes/classPermiso.py
maestromark55/bust-radio
d3552304e9e0f551359b3a6b72f0f2bc31e863f5
[ "Apache-2.0" ]
null
null
null
python/classes/classPermiso.py
maestromark55/bust-radio
d3552304e9e0f551359b3a6b72f0f2bc31e863f5
[ "Apache-2.0" ]
null
null
null
python/classes/classPermiso.py
maestromark55/bust-radio
d3552304e9e0f551359b3a6b72f0f2bc31e863f5
[ "Apache-2.0" ]
null
null
null
import sys import piLock.configuration as conf import classErrorLog as errorLog class classPermiso: def __init__(self, tag): self.rfidTag = tag try: with conf.db: c = conf.db.cursor() c.execute("SELECT * FROM %s WHERE (tarjeta_RFID=:x)" % conf.permisoTable, {"x": self.rfidTag}) row = c.fetchone() if row is None: self.tagRecognized = conf.NO self.doorNumber = None self.permission = conf.PERMISSION_NO self.personID = None self.personName = None self.personPIN = None self.personPhoto = None self.startHour = "00:00:00" self.endHour = "24:00:00" self.sundayPermission = "0" self.endDate = "2100-01-01" else: self.tagRecognized = conf.YES self.doorNumber = row[1] self.personID = row[3] self.personName = row[4] self.personPIN = int(row[5]) self.permission = row[6] self.sundayPermission = row[7] self.startHour = row[8] self.endHour = row[9] self.endDate = row[10] except: errorLog.classErrorLog(sys.exc_info())
35.829268
110
0.461538
import sys import piLock.configuration as conf import classErrorLog as errorLog class classPermiso: def __init__(self, tag): self.rfidTag = tag try: with conf.db: c = conf.db.cursor() c.execute("SELECT * FROM %s WHERE (tarjeta_RFID=:x)" % conf.permisoTable, {"x": self.rfidTag}) row = c.fetchone() if row is None: self.tagRecognized = conf.NO self.doorNumber = None self.permission = conf.PERMISSION_NO self.personID = None self.personName = None self.personPIN = None self.personPhoto = None self.startHour = "00:00:00" self.endHour = "24:00:00" self.sundayPermission = "0" self.endDate = "2100-01-01" else: self.tagRecognized = conf.YES self.doorNumber = row[1] self.personID = row[3] self.personName = row[4] self.personPIN = int(row[5]) self.permission = row[6] self.sundayPermission = row[7] self.startHour = row[8] self.endHour = row[9] self.endDate = row[10] except: errorLog.classErrorLog(sys.exc_info())
0
0
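The query in classPermiso interpolates the table name with % (table names cannot be bound parameters) while passing the RFID tag through the driver's named :x placeholder, which the driver escapes safely. A minimal sqlite3 sketch of the same two-level pattern, with a made-up in-memory schema (the record's conf.db may be a different DB-API driver):

import sqlite3

db = sqlite3.connect(":memory:")
db.execute("CREATE TABLE permiso (tarjeta_RFID TEXT, puerta INTEGER)")
db.execute("INSERT INTO permiso VALUES ('AB12CD', 4)")

table = "permiso"  # trusted configuration value, like conf.permisoTable above
with db:
    c = db.cursor()
    c.execute("SELECT * FROM %s WHERE (tarjeta_RFID=:x)" % table, {"x": "AB12CD"})
    row = c.fetchone()

print(row)  # ('AB12CD', 4)

Only the :x value is escaped by the driver, so the interpolated table name must come from trusted configuration, never from user input.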
0f98aaa91b5b977fd6d211f7d9569c79ce941321
597
py
Python
Challenges/Quartiles.py
adarsh2104/Hacker-Rank-Days-of-Statistics
30a1c56dc69ae0a98c09e5075f9b6dd0b747e0f9
[ "MIT" ]
2
2021-02-26T14:28:08.000Z
2021-02-26T18:51:51.000Z
Challenges/Quartiles.py
adarsh2104/Hacker-Rank-Days-of-Statistics
30a1c56dc69ae0a98c09e5075f9b6dd0b747e0f9
[ "MIT" ]
null
null
null
Challenges/Quartiles.py
adarsh2104/Hacker-Rank-Days-of-Statistics
30a1c56dc69ae0a98c09e5075f9b6dd0b747e0f9
[ "MIT" ]
null
null
null
# Github : https://github.com/adarsh2104 # HR-Profile: https://www.hackerrank.com/adarsh_2104 # Challenge : https://www.hackerrank.com/challenges/s10-quartiles # Max Score : 30 def find_median(array): if len(array) % 2 == 1: return array[len(array) // 2] else: return (array[len(array) // 2] + array[len(array) // 2 - 1]) // 2 n = input() input_array = sorted([int(x) for x in input().split()]) print(find_median(input_array[:len(input_array)//2])) print(find_median(input_array)) print(find_median(input_array[len(input_array) // 2 + len(input_array) % 2:]))
28.428571
78
0.658291
# Github : https://github.com/adarsh2104 # HR-Profile: https://www.hackerrank.com/adarsh_2104 # Challenge : https://www.hackerrank.com/challenges/s10-quartiles # Max Score : 30 def find_median(array): if len(array) % 2 == 1: return array[len(array) // 2] else: return (array[len(array) // 2] + array[len(array) // 2 - 1]) // 2 n = input() input_array = sorted([int(x) for x in input().split()]) print(find_median(input_array[:len(input_array)//2])) print(find_median(input_array)) print(find_median(input_array[len(input_array) // 2 + len(input_array) % 2:]))
0
0
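The slicing in the solution above is what makes the quartiles come out right for both parities: data[:n//2] is the lower half, excluding the median when n is odd, and data[n//2 + n%2:] skips past the median to give the upper half. Worked through on a concrete odd-length sample, reusing find_median from the record above:

data = sorted([3, 7, 8, 5, 12, 14, 21, 13, 18])
# data == [3, 5, 7, 8, 12, 13, 14, 18, 21], n == 9
n = len(data)

lower = data[:n // 2]          # [3, 5, 7, 8]
upper = data[n // 2 + n % 2:]  # [13, 14, 18, 21]

print(find_median(lower))  # Q1 = (5 + 7) // 2 = 6
print(find_median(data))   # Q2 = data[4] = 12
print(find_median(upper))  # Q3 = (14 + 18) // 2 = 16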
586635bed9aefd8fbc1c66989b9458a4ab61adfe
1,945
py
Python
corehq/apps/export/det/base.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
471
2015-01-10T02:55:01.000Z
2022-03-29T18:07:18.000Z
corehq/apps/export/det/base.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
14,354
2015-01-01T07:38:23.000Z
2022-03-31T20:55:14.000Z
corehq/apps/export/det/base.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
175
2015-01-06T07:16:47.000Z
2022-03-29T13:27:01.000Z
import attr from couchexport.export import export_raw from couchexport.models import Format TITLE_ROW = [ 'Source Field', 'Field', 'Map Via', 'Data Source', 'Filter Name', 'Filter Value', 'Table Name', 'Format Via', ] @attr.s class DETConfig: name = attr.ib() tables = attr.ib(factory=list) @property def table_names(self): return [t.name for t in self.tables] def get_table(self, name): filtered_tables = [t for t in self.tables if t.name == name] assert len(filtered_tables) == 1 return filtered_tables[0] def export_to_file(self, output_file): header_sheets = [] data_sheets = [] for table in self.tables: header_sheets.append((table.name, TITLE_ROW)) data_sheets.append((table.name, list(table.get_sheet_data()))) export_raw(header_sheets, data_sheets, output_file, format=Format.XLS_2007) @attr.s class DETTable: name = attr.ib() source = attr.ib() rows = attr.ib(factory=list) filter_name = attr.ib(default='') filter_value = attr.ib(default='') def get_sheet_data(self): if not self.rows: return else: for i, row in enumerate(self.rows): if i == 0: # the first row also contains the source/filter data yield [ row.source_field, row.field, row.map_via, self.source, self.filter_name, self.filter_value, ] else: yield [ row.source_field, row.field, row.map_via, ] @attr.s class DETRow: source_field = attr.ib() field = attr.ib() map_via = attr.ib(default='')
24.935897
83
0.521337
import attr from couchexport.export import export_raw from couchexport.models import Format TITLE_ROW = [ 'Source Field', 'Field', 'Map Via', 'Data Source', 'Filter Name', 'Filter Value', 'Table Name', 'Format Via', ] @attr.s class DETConfig: name = attr.ib() tables = attr.ib(factory=list) @property def table_names(self): return [t.name for t in self.tables] def get_table(self, name): filtered_tables = [t for t in self.tables if t.name == name] assert len(filtered_tables) == 1 return filtered_tables[0] def export_to_file(self, output_file): header_sheets = [] data_sheets = [] for table in self.tables: header_sheets.append((table.name, TITLE_ROW)) data_sheets.append((table.name, list(table.get_sheet_data()))) export_raw(header_sheets, data_sheets, output_file, format=Format.XLS_2007) @attr.s class DETTable: name = attr.ib() source = attr.ib() rows = attr.ib(factory=list) filter_name = attr.ib(default='') filter_value = attr.ib(default='') def get_sheet_data(self): if not self.rows: return else: for i, row in enumerate(self.rows): if i == 0: # the first row also contains the source/filter data yield [ row.source_field, row.field, row.map_via, self.source, self.filter_name, self.filter_value, ] else: yield [ row.source_field, row.field, row.map_via, ] @attr.s class DETRow: source_field = attr.ib() field = attr.ib() map_via = attr.ib(default='')
0
0
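The three attrs classes above are plain data containers; the only behaviour is in get_sheet_data, which attaches the table-level source and filter columns to the first row only. A small sketch of how they compose, with made-up field names (export_to_file is omitted since it needs couchexport, and the map_via value here is illustrative, not a real transform name):

table = DETTable(
    name="cases",
    source="case",
    filter_name="type",
    filter_value="patient",
    rows=[
        DETRow(source_field="case_id", field="id"),
        DETRow(source_field="opened_on", field="opened", map_via="str2date"),
    ],
)
config = DETConfig(name="example", tables=[table])

print(config.table_names)  # ['cases']
for sheet_row in config.get_table("cases").get_sheet_data():
    print(sheet_row)
# ['case_id', 'id', '', 'case', 'type', 'patient']   <- first row carries source/filter
# ['opened_on', 'opened', 'str2date']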
87eb59f5bcf3a945fa6fe34538a2552cbcaa1241
2,241
py
Python
wayne/trend_generators/visit_trends.py
ucl-exoplanets/wayne
48fd07588cbbab6f5a32038455e36d7fc6b89625
[ "MIT" ]
7
2017-05-30T09:01:50.000Z
2019-04-05T05:46:23.000Z
wayne/trend_generators/visit_trends.py
ucl-exoplanets/wayne
48fd07588cbbab6f5a32038455e36d7fc6b89625
[ "MIT" ]
1
2018-06-07T17:31:19.000Z
2018-06-07T19:38:27.000Z
wayne/trend_generators/visit_trends.py
ucl-exoplanets/wayne
48fd07588cbbab6f5a32038455e36d7fc6b89625
[ "MIT" ]
2
2018-04-30T23:16:22.000Z
2020-09-30T18:12:47.000Z
""" Handles visit long trends (scaling factors) applied to the observation. The classic cases are the `hook' and long term ramp """ import abc import numpy as np class BaseVisitTrend(object): """ Visit trends take input the visit planner output and generate a scaling factor that will be multiplied per exposure. They must implement the method `_gen_scaling_factors` which outputs a list of scaling factors, one per exposure """ __metaclass__ = abc.ABCMeta def __init__(self, visit_plan, coeffs=None): self.visit_plan = visit_plan self.coeffs = coeffs self.scale_factors = self._gen_scaling_factors(visit_plan, coeffs) @abc.abstractmethod def _gen_scaling_factors(self, visit_plan, coeffs): pass def get_scale_factor(self, exp_num): """ Returns the scale factor for the exposure number `exp_num`.""" return self.scale_factors[exp_num] class HookAndLongTermRamp(BaseVisitTrend): def _gen_scaling_factors(self, visit_plan, coeffs): t = visit_plan['exp_start_times'] t_0 = gen_orbit_start_times_per_exp(t, visit_plan['orbit_start_index']) ramp = self.ramp_model(t, t_0, *coeffs) return ramp @staticmethod def ramp_model(t, t_0, a1, b1, b2, to): """ Combined hook and long term ramp model :param t: time_array :param t_0: array of orbit start times (per exposure) :param a1: linear ramp gradient :param b1: exponential hook coeff1 :param b2: exponential hook coeff2 :return: ramp_model """ t = np.array(t) # wipes units if any ramp = (1 - a1 * (t - to)) * (1 - b1 * np.exp(-b2 * (t - t_0))) return ramp def gen_orbit_start_times_per_exp(time_array, obs_start_index): """Generates t0, the time of an orbit for each orbit so it can vectorised i.e for each element time_array there will be a matching element in t_0 giving the orbit start time. """ obs_index = obs_start_index[:] obs_index.append(len(time_array)) t_0 = np.zeros(len(time_array)) for i in xrange(len(obs_index) - 1): t_0[obs_index[i]:obs_index[i + 1]] = time_array[obs_start_index[i]] return t_0
30.283784
86
0.671129
""" Handles visit long trends (scaling factors) applied to the observation. The classic cases are the `hook' and long term ramp """ import abc import numpy as np class BaseVisitTrend(object): """ Visit trends take input the visit planner output and generate a scaling factor that will be multiplied per exposure. They must implement the method `_gen_scaling_factors` which outputs a list of scaling factors, one per exposure """ __metaclass__ = abc.ABCMeta def __init__(self, visit_plan, coeffs=None): self.visit_plan = visit_plan self.coeffs = coeffs self.scale_factors = self._gen_scaling_factors(visit_plan, coeffs) @abc.abstractmethod def _gen_scaling_factors(self, visit_plan, coeffs): pass def get_scale_factor(self, exp_num): """ Returns the scale factor for the exposure number `exp_num`.""" return self.scale_factors[exp_num] class HookAndLongTermRamp(BaseVisitTrend): def _gen_scaling_factors(self, visit_plan, coeffs): t = visit_plan['exp_start_times'] t_0 = gen_orbit_start_times_per_exp(t, visit_plan['orbit_start_index']) ramp = self.ramp_model(t, t_0, *coeffs) return ramp @staticmethod def ramp_model(t, t_0, a1, b1, b2, to): """ Combined hook and long term ramp model :param t: time_array :param t_0: array of orbit start times (per exposure) :param a1: linear ramp gradient :param b1: exponential hook coeff1 :param b2: exponential hook coeff2 :return: ramp_model """ t = np.array(t) # wipes units if any ramp = (1 - a1 * (t - to)) * (1 - b1 * np.exp(-b2 * (t - t_0))) return ramp def gen_orbit_start_times_per_exp(time_array, obs_start_index): """Generates t0, the time of an orbit for each orbit so it can vectorised i.e for each element time_array there will be a matching element in t_0 giving the orbit start time. """ obs_index = obs_start_index[:] obs_index.append(len(time_array)) t_0 = np.zeros(len(time_array)) for i in xrange(len(obs_index) - 1): t_0[obs_index[i]:obs_index[i + 1]] = time_array[obs_start_index[i]] return t_0
0
0
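Written out, the model above is ramp(t) = (1 - a1*(t - to)) * (1 - b1*exp(-b2*(t - t_0))): a slow linear drift across the visit multiplied by an exponential hook that resets at each orbit start. Evaluating it with illustrative (not fitted) coefficients:

import numpy as np

t = np.array([0.00, 0.01, 0.02, 0.05])  # exposure start times (days)
t_0 = np.zeros_like(t)                  # all exposures in the first orbit
a1, b1, b2, to = 0.05, 0.02, 200.0, 0.0

ramp = (1 - a1 * (t - to)) * (1 - b1 * np.exp(-b2 * (t - t_0)))
print(np.round(ramp, 5))
# The hook factor recovers towards 1 as t - t_0 grows, while the
# linear factor decays slowly over the whole visit.

gen_orbit_start_times_per_exp simply builds the matching t_0 array from the orbit start indices so this whole expression vectorises.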
20b20caa6fbb670cc141c57bc10a431f41d617b3
14,975
py
Python
ietf/meeting/migrations/0011_ietf92_meetings.py
ekr/ietfdb
8d936836b0b9ff31cda415b0a423e3f5b33ab695
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
2
2021-11-20T03:40:40.000Z
2021-11-20T03:40:42.000Z
ietf/meeting/migrations/0011_ietf92_meetings.py
ekr/ietfdb
8d936836b0b9ff31cda415b0a423e3f5b33ab695
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
ietf/meeting/migrations/0011_ietf92_meetings.py
ekr/ietfdb
8d936836b0b9ff31cda415b0a423e3f5b33ab695
[ "BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime from django.db import migrations def backfill_92_other_meetings(apps, schema_editor): Meeting = apps.get_model('meeting', 'Meeting') Schedule = apps.get_model('meeting', 'Schedule') ScheduledSession = apps.get_model('meeting', 'ScheduledSession') Room = apps.get_model('meeting', 'Room') Session = apps.get_model('meeting', 'Session') Group = apps.get_model('group', 'Group') Person = apps.get_model('person', 'Person') ietf92 = Meeting.objects.filter(number=92).first() if not ietf92: print "IETF92 not found, no data changed" else: # Clear out one orphaned ill-configured Session object qs = Session.objects.filter(meeting__number=92,name__icontains='beverage break').exclude(type_id='break') if qs.count()==1: qs.delete() agenda92 = Schedule.objects.get(meeting=ietf92,pk=ietf92.agenda.pk) map_existing = { 'Regency Ballroom': 'Lounge', 'Garden Terrace Level': 'Meet and Greet', 'Royal': 'Breakout 1', 'Continental': 'Breakout 2', 'Far East': 'Breakout 3', 'Oak ': 'Breakout 4', 'Parisian': 'Breakout 5', 'Venetian': 'Breakout 6', 'Gold': 'Breakout 7', 'International': 'Breakout 8', 'Brasserie': 'Terminal Room', 'State': 'Office #3 (Secretariat Office)', 'French': 'Meeting Room #2 (IESG Meeting Room)', } for name,functional_name in map_existing.items(): Room.objects.filter(meeting__number=92,name=name).update(functional_name=functional_name) regency = Room.objects.get(meeting=ietf92,name='Regency Ballroom') garden = Room.objects.get(meeting=ietf92,name='Garden Terrace Level') royal = Room.objects.get(meeting=ietf92,name='Royal') continental = Room.objects.get(meeting=ietf92,name='Continental') far_east = Room.objects.get(meeting=ietf92,name='Far East') oak = Room.objects.get(meeting=ietf92,name='Oak ') #parisian = Room.objects.get(meeting=ietf92,name='Parisian') #venetian = Room.objects.get(meeting=ietf92,name='Venetian') #gold = Room.objects.get(meeting=ietf92,name='Gold') #international = Room.objects.get(meeting=ietf92,name='International') brasserie = Room.objects.get(meeting=ietf92,name='Brasserie') state = Room.objects.get(meeting=ietf92,name='State') #french = Room.objects.get(meeting=ietf92,name='French') executive = Room.objects.create(meeting=ietf92,name='Executive',functional_name='Meeting Room #4 (IAOC/IAD)',capacity=20) regency_foyer = Room.objects.create(meeting=ietf92,name='Regency Foyer',functional_name='Registration',capacity=1200) florentine = Room.objects.create(meeting=ietf92,name='Florentine',functional_name='Meeting Room #1 (IAB)', capacity=40) pavilion = Room.objects.create(meeting=ietf92,name='Pavilion',functional_name='Meeting Room #6', capacity=80) terrace = Room.objects.create(meeting=ietf92,name='Terrace',functional_name='Meeting Room #7', capacity=80) panorama = Room.objects.create(meeting=ietf92,name='Panorama',functional_name='Companion Reception', capacity=200) regency.session_types.add('offagenda') pavilion.session_types.add('offagenda') pavilion.session_types.add('lead') garden.session_types.add('lead') panorama.session_types.add('offagenda') executive.session_types.add('lead') executive.session_types.add('offagenda') regency_foyer.session_types.add('offagenda') oak.session_types.add('offagenda') continental.session_types.add('offagenda') state.session_types.add('offagenda') florentine.session_types.add('offagenda') terrace.session_types.add('lead') terrace.session_types.add('offagenda') far_east.session_types.add('offagenda') brasserie.session_types.add('offagenda') 
royal.session_types.add('offagenda') iesg = Group.objects.get(acronym='iesg') iab = Group.objects.get(acronym='iab') iaoc = Group.objects.get(acronym='iaoc') secr = Group.objects.get(acronym='secretariat') system = Person.objects.get(name='(System)') for d, h, m, duration, type_id, groups, room, slotname, label in [ ( 20, 13, 0, 480, 'offagenda', [secr], brasserie, 'Setup', 'Hackathon: Setup'), ( 20, 8, 0, 540, 'offagenda', [secr], executive, 'Meeting', 'DNS OARC Meeting'), ( 21, 8, 0, 540, 'offagenda', [secr], executive, 'Meeting', 'DNS OARC Meeting'), ( 22, 12, 0, 720, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 22, 11, 0, 480, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 22, 19, 0, 120, 'offagenda', [secr], state, 'Meeting', 'CJK Generation Panel coordination informal meeting'), ( 22, 19, 0, 120, 'offagenda', [iab], florentine, 'Meeting', 'IAB PrivSec program'), ( 22, 8, 30, 90, 'lead', [iesg], pavilion, 'Breakfast', None), ( 22, 9, 0, 150, 'lead', [iesg], pavilion, 'Meeting', None), ( 22, 11, 30, 150, 'lead', [iab], pavilion, 'Lunch', 'IAB Lunch with the IESG'), ( 22, 11, 30, 150, 'lead', [iesg], pavilion, 'Lunch', 'IESG Lunch with the IAB'), ( 22, 14, 0, 180, 'lead', [iab], pavilion, 'Meeting', None), ( 22, 9, 0, 480, 'offagenda', [secr], terrace, 'Meeting', 'RootOPS'), ( 22, 16, 30, 60, 'offagenda', [secr], panorama, 'Reception', "Companion's Reception"), # Should this appear on agenda? ( 22, 21, 0, 180, 'lead', [secr], garden, 'Gathering', 'AMS/IESG/IAB/IAOC Gathering'), ( 22, 9, 0, 480, 'offagenda', [secr], royal, 'ICNRG', 'ICNRG'), ( 22, 19, 0, 180, 'offagenda', [secr], royal, 'Meeting', 'Huawei'), ( 22, 12, 30, 240, 'offagenda', [secr], continental, 'Meeting', 'Verisign ROA Workshop'), ( 22, 15, 15, 165, 'offagenda', [secr], far_east, 'Meeting', 'RSSAC'), ( 22, 9, 0, 150, 'offagenda', [secr], oak, 'Meeting', 'Ericsson'), ( 23, 0, 0, 1440, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 23, 8, 0, 600, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 23, 0, 0, 1440, 'offagenda', [secr], regency, 'Lounge', 'Lounge'), ( 23, 11, 30, 180, 'offagenda', [secr], executive, 'Lunch', 'ICANN Lunch'), ( 23, 7, 0, 120, 'lead', [iesg], pavilion, 'Breakfast', 'IESG Breakfast with the IAB'), ( 23, 7, 0, 120, 'lead', [iab], pavilion, 'Breakfast', 'IAB Breakfast with the IESG'), ( 23, 11, 30, 90, 'offagenda', [secr], pavilion, 'Meeting', 'OPS Directorate Meeting'), ( 23, 19, 0, 120, 'offagenda', [secr], pavilion, 'Meeting', 'ACE'), ( 23, 7, 30, 90, 'offagenda', [secr], terrace, 'Meeting', 'NRO ECG'), ( 23, 11, 30, 90, 'offagenda', [secr], terrace, 'Meeting', 'IETF/3GPP Meeting'), ( 23, 19, 0, 120, 'offagenda', [secr], terrace, 'Meeting', 'I2NSF'), ( 23, 18, 50, 60, 'offagenda', [secr], royal, 'Meeting', 'Captive Portal Bar BOF'), ( 24, 0, 0, 1440, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 24, 8, 0, 600, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 24, 0, 0, 1440, 'offagenda', [secr], regency, 'Lounge', 'Lounge'), ( 24, 11, 30, 90, 'offagenda', [secr], state, 'Meeting', 'HIAPS'), ( 24, 16, 30, 120, 'offagenda', [secr], state, 'Meeting', 'PDF Draft Review'), ( 24, 7, 0, 120, 'lead', [iesg], pavilion, 'Breakfast', None), ( 24, 11, 30, 90, 'offagenda', [secr], pavilion, 'Meeting', 'SECdir Meeting'), ( 24, 7, 0, 120, 'lead', [iab], terrace, 'Breakfast', 
None), ( 24, 9, 0, 120, 'offagenda', [secr], terrace, 'Meeting', 'ICNN DRZK Design Team'), ( 24, 11, 30, 90, 'offagenda', [secr], terrace, 'Lunch', 'RSAG/ISEB Lunch'), ( 24, 13, 0, 120, 'offagenda', [secr], terrace, 'Meeting', 'SACM'), ( 24, 15, 0, 90, 'offagenda', [secr], terrace, 'Meeting', 'RSOC Meeting'), ( 24, 17, 30, 60, 'offagenda', [secr], terrace, 'Meeting', 'SACM'), ( 24, 11, 30, 90, 'offagenda', [secr], royal, 'Meeting', 'IoT Directorate'), ( 25, 0, 0, 1440, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 25, 8, 0, 600, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 25, 0, 0, 1440, 'offagenda', [secr], regency, 'Lounge', 'Lounge'), ( 25, 8, 0, 60, 'offagenda', [secr], state, 'Meeting', 'SFC Control Plane Offline Discussion'), ( 25, 19, 0, 240, 'offagenda', [secr], state, 'Meeting', 'WWG'), ( 25, 8, 0, 60, 'offagenda', [secr], florentine, 'Meeting', 'IAB Name Resolution'), ( 25, 6, 45, 135, 'lead', [iaoc], executive, 'Breakfast', None), ( 25, 11, 30, 90, 'offagenda', [secr], pavilion, 'Meeting', 'RMCAT'), ( 25, 19, 0, 120, 'offagenda', [secr], pavilion, 'Meeting', 'I2NSF'), ( 25, 8, 0, 60, 'offagenda', [secr], terrace, 'Meeting', 'IETF/IEEE 802 Coordination'), ( 25, 11, 30, 90, 'offagenda', [secr], terrace, 'Lunch', 'RFC Editor Lunch'), ( 25, 19, 30, 120, 'offagenda', [secr], terrace, 'Dinner', 'SSAC Dinner'), ( 26, 0, 0, 1440, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 26, 8, 0, 600, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 26, 0, 0, 1440, 'offagenda', [secr], regency, 'Lounge', 'Lounge'), ( 26, 7, 30, 90, 'offagenda', [secr], state, 'Breakfast', 'EDU Team Breakfast'), ( 26, 14, 0, 120, 'offagenda', [secr], state, 'Meeting', 'JJB'), ( 26, 11, 30, 90, 'offagenda', [secr], florentine, 'Meeting', 'IAB Liaison Oversight'), ( 26, 18, 0, 150, 'offagenda', [secr], pavilion, 'Meeting', '6LO Security Discussion'), ( 26, 7, 0, 120, 'lead', [iab], terrace, 'Breakfast', None), ( 26, 17, 40, 60, 'offagenda', [secr], terrace, 'Meeting', 'SACM'), ( 26, 19, 30, 150, 'offagenda', [secr], royal, 'Meeting', 'Lavabit'), ( 27, 0, 0, 900, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 27, 7, 30, 90, 'offagenda', [secr], executive, 'Meeting', 'Post-Con with Ray'), ( 27, 7, 30, 75, 'offagenda', [secr], state, 'Breakfast', 'Gen-art'), ( 27, 13, 30, 90, 'lead', [iab], pavilion, 'Lunch', 'IAB Lunch with the IESG'), ( 27, 13, 30, 90, 'lead', [iesg], pavilion, 'Lunch', 'IESG Lunch with the IAB'), ]: ts = ietf92.timeslot_set.create(type_id=type_id, name=slotname, time=datetime.datetime(2015,3,d,h,m,0), duration=datetime.timedelta(minutes=duration), location=room,show_location=(type_id not in ['lead','offagenda'])) for group in groups: session = ietf92.session_set.create(name= label or "%s %s"%(group.acronym.upper(),slotname), group=group, attendees=25, requested=datetime.datetime(2014,11,1,0,0,0), requested_by=system, status_id='sched',type_id=type_id) ScheduledSession.objects.create(schedule=agenda92, timeslot=ts, session=session) class Migration(migrations.Migration): dependencies = [ ('meeting', '0010_auto_20150501_0732'), ('name', '0004_auto_20150318_1140'), ('group', '0004_auto_20150430_0847'), ('person', '0004_auto_20150308_0440'), ] operations = [ migrations.RunPython(backfill_92_other_meetings) ]
76.403061
152
0.482204
# -*- coding: utf-8 -*- from __future__ import unicode_literals import datetime from django.db import migrations def backfill_92_other_meetings(apps, schema_editor): Meeting = apps.get_model('meeting', 'Meeting') Schedule = apps.get_model('meeting', 'Schedule') ScheduledSession = apps.get_model('meeting', 'ScheduledSession') Room = apps.get_model('meeting', 'Room') Session = apps.get_model('meeting', 'Session') Group = apps.get_model('group', 'Group') Person = apps.get_model('person', 'Person') ietf92 = Meeting.objects.filter(number=92).first() if not ietf92: print "IETF92 not found, no data changed" else: # Clear out one orphaned ill-configured Session object qs = Session.objects.filter(meeting__number=92,name__icontains='beverage break').exclude(type_id='break') if qs.count()==1: qs.delete() agenda92 = Schedule.objects.get(meeting=ietf92,pk=ietf92.agenda.pk) map_existing = { 'Regency Ballroom': 'Lounge', 'Garden Terrace Level': 'Meet and Greet', 'Royal': 'Breakout 1', 'Continental': 'Breakout 2', 'Far East': 'Breakout 3', 'Oak ': 'Breakout 4', 'Parisian': 'Breakout 5', 'Venetian': 'Breakout 6', 'Gold': 'Breakout 7', 'International': 'Breakout 8', 'Brasserie': 'Terminal Room', 'State': 'Office #3 (Secretariat Office)', 'French': 'Meeting Room #2 (IESG Meeting Room)', } for name,functional_name in map_existing.items(): Room.objects.filter(meeting__number=92,name=name).update(functional_name=functional_name) regency = Room.objects.get(meeting=ietf92,name='Regency Ballroom') garden = Room.objects.get(meeting=ietf92,name='Garden Terrace Level') royal = Room.objects.get(meeting=ietf92,name='Royal') continental = Room.objects.get(meeting=ietf92,name='Continental') far_east = Room.objects.get(meeting=ietf92,name='Far East') oak = Room.objects.get(meeting=ietf92,name='Oak ') #parisian = Room.objects.get(meeting=ietf92,name='Parisian') #venetian = Room.objects.get(meeting=ietf92,name='Venetian') #gold = Room.objects.get(meeting=ietf92,name='Gold') #international = Room.objects.get(meeting=ietf92,name='International') brasserie = Room.objects.get(meeting=ietf92,name='Brasserie') state = Room.objects.get(meeting=ietf92,name='State') #french = Room.objects.get(meeting=ietf92,name='French') executive = Room.objects.create(meeting=ietf92,name='Executive',functional_name='Meeting Room #4 (IAOC/IAD)',capacity=20) regency_foyer = Room.objects.create(meeting=ietf92,name='Regency Foyer',functional_name='Registration',capacity=1200) florentine = Room.objects.create(meeting=ietf92,name='Florentine',functional_name='Meeting Room #1 (IAB)', capacity=40) pavilion = Room.objects.create(meeting=ietf92,name='Pavilion',functional_name='Meeting Room #6', capacity=80) terrace = Room.objects.create(meeting=ietf92,name='Terrace',functional_name='Meeting Room #7', capacity=80) panorama = Room.objects.create(meeting=ietf92,name='Panorama',functional_name='Companion Reception', capacity=200) regency.session_types.add('offagenda') pavilion.session_types.add('offagenda') pavilion.session_types.add('lead') garden.session_types.add('lead') panorama.session_types.add('offagenda') executive.session_types.add('lead') executive.session_types.add('offagenda') regency_foyer.session_types.add('offagenda') oak.session_types.add('offagenda') continental.session_types.add('offagenda') state.session_types.add('offagenda') florentine.session_types.add('offagenda') terrace.session_types.add('lead') terrace.session_types.add('offagenda') far_east.session_types.add('offagenda') brasserie.session_types.add('offagenda') 
royal.session_types.add('offagenda') iesg = Group.objects.get(acronym='iesg') iab = Group.objects.get(acronym='iab') iaoc = Group.objects.get(acronym='iaoc') secr = Group.objects.get(acronym='secretariat') system = Person.objects.get(name='(System)') for d, h, m, duration, type_id, groups, room, slotname, label in [ ( 20, 13, 0, 480, 'offagenda', [secr], brasserie, 'Setup', 'Hackathon: Setup'), ( 20, 8, 0, 540, 'offagenda', [secr], executive, 'Meeting', 'DNS OARC Meeting'), ( 21, 8, 0, 540, 'offagenda', [secr], executive, 'Meeting', 'DNS OARC Meeting'), ( 22, 12, 0, 720, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 22, 11, 0, 480, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 22, 19, 0, 120, 'offagenda', [secr], state, 'Meeting', 'CJK Generation Panel coordination informal meeting'), ( 22, 19, 0, 120, 'offagenda', [iab], florentine, 'Meeting', 'IAB PrivSec program'), ( 22, 8, 30, 90, 'lead', [iesg], pavilion, 'Breakfast', None), ( 22, 9, 0, 150, 'lead', [iesg], pavilion, 'Meeting', None), ( 22, 11, 30, 150, 'lead', [iab], pavilion, 'Lunch', 'IAB Lunch with the IESG'), ( 22, 11, 30, 150, 'lead', [iesg], pavilion, 'Lunch', 'IESG Lunch with the IAB'), ( 22, 14, 0, 180, 'lead', [iab], pavilion, 'Meeting', None), ( 22, 9, 0, 480, 'offagenda', [secr], terrace, 'Meeting', 'RootOPS'), ( 22, 16, 30, 60, 'offagenda', [secr], panorama, 'Reception', "Companion's Reception"), # Should this appear on agenda? ( 22, 21, 0, 180, 'lead', [secr], garden, 'Gathering', 'AMS/IESG/IAB/IAOC Gathering'), ( 22, 9, 0, 480, 'offagenda', [secr], royal, 'ICNRG', 'ICNRG'), ( 22, 19, 0, 180, 'offagenda', [secr], royal, 'Meeting', 'Huawei'), ( 22, 12, 30, 240, 'offagenda', [secr], continental, 'Meeting', 'Verisign ROA Workshop'), ( 22, 15, 15, 165, 'offagenda', [secr], far_east, 'Meeting', 'RSSAC'), ( 22, 9, 0, 150, 'offagenda', [secr], oak, 'Meeting', 'Ericsson'), ( 23, 0, 0, 1440, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 23, 8, 0, 600, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 23, 0, 0, 1440, 'offagenda', [secr], regency, 'Lounge', 'Lounge'), ( 23, 11, 30, 180, 'offagenda', [secr], executive, 'Lunch', 'ICANN Lunch'), ( 23, 7, 0, 120, 'lead', [iesg], pavilion, 'Breakfast', 'IESG Breakfast with the IAB'), ( 23, 7, 0, 120, 'lead', [iab], pavilion, 'Breakfast', 'IAB Breakfast with the IESG'), ( 23, 11, 30, 90, 'offagenda', [secr], pavilion, 'Meeting', 'OPS Directorate Meeting'), ( 23, 19, 0, 120, 'offagenda', [secr], pavilion, 'Meeting', 'ACE'), ( 23, 7, 30, 90, 'offagenda', [secr], terrace, 'Meeting', 'NRO ECG'), ( 23, 11, 30, 90, 'offagenda', [secr], terrace, 'Meeting', 'IETF/3GPP Meeting'), ( 23, 19, 0, 120, 'offagenda', [secr], terrace, 'Meeting', 'I2NSF'), ( 23, 18, 50, 60, 'offagenda', [secr], royal, 'Meeting', 'Captive Portal Bar BOF'), ( 24, 0, 0, 1440, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 24, 8, 0, 600, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 24, 0, 0, 1440, 'offagenda', [secr], regency, 'Lounge', 'Lounge'), ( 24, 11, 30, 90, 'offagenda', [secr], state, 'Meeting', 'HIAPS'), ( 24, 16, 30, 120, 'offagenda', [secr], state, 'Meeting', 'PDF Draft Review'), ( 24, 7, 0, 120, 'lead', [iesg], pavilion, 'Breakfast', None), ( 24, 11, 30, 90, 'offagenda', [secr], pavilion, 'Meeting', 'SECdir Meeting'), ( 24, 7, 0, 120, 'lead', [iab], terrace, 'Breakfast', 
None), ( 24, 9, 0, 120, 'offagenda', [secr], terrace, 'Meeting', 'ICNN DRZK Design Team'), ( 24, 11, 30, 90, 'offagenda', [secr], terrace, 'Lunch', 'RSAG/ISEB Lunch'), ( 24, 13, 0, 120, 'offagenda', [secr], terrace, 'Meeting', 'SACM'), ( 24, 15, 0, 90, 'offagenda', [secr], terrace, 'Meeting', 'RSOC Meeting'), ( 24, 17, 30, 60, 'offagenda', [secr], terrace, 'Meeting', 'SACM'), ( 24, 11, 30, 90, 'offagenda', [secr], royal, 'Meeting', 'IoT Directorate'), ( 25, 0, 0, 1440, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 25, 8, 0, 600, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 25, 0, 0, 1440, 'offagenda', [secr], regency, 'Lounge', 'Lounge'), ( 25, 8, 0, 60, 'offagenda', [secr], state, 'Meeting', 'SFC Control Plane Offline Discussion'), ( 25, 19, 0, 240, 'offagenda', [secr], state, 'Meeting', 'WWG'), ( 25, 8, 0, 60, 'offagenda', [secr], florentine, 'Meeting', 'IAB Name Resolution'), ( 25, 6, 45, 135, 'lead', [iaoc], executive, 'Breakfast', None), ( 25, 11, 30, 90, 'offagenda', [secr], pavilion, 'Meeting', 'RMCAT'), ( 25, 19, 0, 120, 'offagenda', [secr], pavilion, 'Meeting', 'I2NSF'), ( 25, 8, 0, 60, 'offagenda', [secr], terrace, 'Meeting', 'IETF/IEEE 802 Coordination'), ( 25, 11, 30, 90, 'offagenda', [secr], terrace, 'Lunch', 'RFC Editor Lunch'), ( 25, 19, 30, 120, 'offagenda', [secr], terrace, 'Dinner', 'SSAC Dinner'), ( 26, 0, 0, 1440, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 26, 8, 0, 600, 'offagenda', [secr], regency_foyer, 'T-Shirt Distribution', 'T-shirt Distribution'), ( 26, 0, 0, 1440, 'offagenda', [secr], regency, 'Lounge', 'Lounge'), ( 26, 7, 30, 90, 'offagenda', [secr], state, 'Breakfast', 'EDU Team Breakfast'), ( 26, 14, 0, 120, 'offagenda', [secr], state, 'Meeting', 'JJB'), ( 26, 11, 30, 90, 'offagenda', [secr], florentine, 'Meeting', 'IAB Liaison Oversight'), ( 26, 18, 0, 150, 'offagenda', [secr], pavilion, 'Meeting', '6LO Security Discussion'), ( 26, 7, 0, 120, 'lead', [iab], terrace, 'Breakfast', None), ( 26, 17, 40, 60, 'offagenda', [secr], terrace, 'Meeting', 'SACM'), ( 26, 19, 30, 150, 'offagenda', [secr], royal, 'Meeting', 'Lavabit'), ( 27, 0, 0, 900, 'offagenda', [secr], brasserie, 'Terminal Room', 'Terminal Room Open to Attendees'), ( 27, 7, 30, 90, 'offagenda', [secr], executive, 'Meeting', 'Post-Con with Ray'), ( 27, 7, 30, 75, 'offagenda', [secr], state, 'Breakfast', 'Gen-art'), ( 27, 13, 30, 90, 'lead', [iab], pavilion, 'Lunch', 'IAB Lunch with the IESG'), ( 27, 13, 30, 90, 'lead', [iesg], pavilion, 'Lunch', 'IESG Lunch with the IAB'), ]: ts = ietf92.timeslot_set.create(type_id=type_id, name=slotname, time=datetime.datetime(2015,3,d,h,m,0), duration=datetime.timedelta(minutes=duration), location=room,show_location=(type_id not in ['lead','offagenda'])) for group in groups: session = ietf92.session_set.create(name= label or "%s %s"%(group.acronym.upper(),slotname), group=group, attendees=25, requested=datetime.datetime(2014,11,1,0,0,0), requested_by=system, status_id='sched',type_id=type_id) ScheduledSession.objects.create(schedule=agenda92, timeslot=ts, session=session) class Migration(migrations.Migration): dependencies = [ ('meeting', '0010_auto_20150501_0732'), ('name', '0004_auto_20150318_1140'), ('group', '0004_auto_20150430_0847'), ('person', '0004_auto_20150308_0440'), ] operations = [ migrations.RunPython(backfill_92_other_meetings) ]
0
0
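The migration above is a standard Django data migration: models are fetched through apps.get_model so the code sees the historical schema, and all writes happen inside the function handed to RunPython. A stripped-down skeleton of that shape, reusing the app labels from the record with a trivialised body:

from django.db import migrations

def forwards(apps, schema_editor):
    # Historical model: reflects the schema at this point in the
    # migration graph, not the current models.py.
    Room = apps.get_model('meeting', 'Room')
    Room.objects.filter(name='Oak ').update(functional_name='Breakout 4')

class Migration(migrations.Migration):

    dependencies = [
        ('meeting', '0010_auto_20150501_0732'),
    ]

    operations = [
        # A reverse function (or RunPython.noop) would make this reversible;
        # the original migration above does not provide one either.
        migrations.RunPython(forwards),
    ]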
5d8f36982b929c47137cde1f262689332f36b121
26,755
py
Python
pipelines/create_trackhub_for_project.py
PRIDE-Toolsuite/trackhub-creator
ade2cfafeaad95088664caecacb783b501c170aa
[ "Apache-2.0" ]
null
null
null
pipelines/create_trackhub_for_project.py
PRIDE-Toolsuite/trackhub-creator
ade2cfafeaad95088664caecacb783b501c170aa
[ "Apache-2.0" ]
null
null
null
pipelines/create_trackhub_for_project.py
PRIDE-Toolsuite/trackhub-creator
ade2cfafeaad95088664caecacb783b501c170aa
[ "Apache-2.0" ]
null
null
null
# # Author: Manuel Bernal Llinares # Project: trackhub-creator # Timestamp : 07-09-2017 11:24 # --- # 2017 Manuel Bernal Llinares <mbdebian@gmail.com> # All rights reserved. # """ This pipeline creates a trackhub for a PRIDE project, based on the information provided via a JSON formatted file, as it can be seen on this sample: { "trackHubName" : "PXD000625", "trackHubShortLabel" : "<a href=\"http://www.ebi.ac.uk/pride/archive/projects/PXD000625\">PXD000625</a> - Hepatoc...", "trackHubLongLabel" : "Experimental design For the label-free ...", "trackHubType" : "PROTEOMICS", "trackHubEmail" : "pride-support@ebi.ac.uk", "trackHubInternalAbsolutePath" : "...", "trackhubCreationReportFilePath": "...", "trackMaps" : [ { "trackName" : "PXD000625_10090_Original", "trackShortLabel" : "<a href=\"http://www.ebi.ac.uk/pride/archive/projects/PXD000625\">PXD000625</a> - Mus musc...", "trackLongLabel" : "Experimental design For the label-free proteome analysis 17 mice were used composed of 5 ...", "trackSpecies" : "10090", "pogoFile" : "..." } ] } """ import os import json import time # App imports import config_manager import ensembl.service import ensembl.data_downloader import trackhub.models as trackhubs import toolbox.general as general_toolbox from parallel.models import ParallelRunnerManagerFactory from parallel.exceptions import NoMoreAliveRunnersException from pogo.models import PogoRunnerFactory from pipelines.template_pipeline import TrackhubCreationPogoBasedDirector, DirectorConfigurationManager # Globals __configuration_file = None __pipeline_arguments = None __pipeline_director = None # Pipeline properties access def set_configuration_file(config_file): global __configuration_file if __configuration_file is None: __configuration_file = config_file return __configuration_file def set_pipeline_arguments(pipeline_arguments): global __pipeline_arguments if __pipeline_arguments is None: __pipeline_arguments = pipeline_arguments return __pipeline_arguments def get_pipeline_director(): global __pipeline_director if __pipeline_director is None: __pipeline_director = TrackhubCreatorForProject(config_manager.read_config_from_file(__configuration_file), __configuration_file, __pipeline_arguments) return __pipeline_director class ConfigManager(DirectorConfigurationManager): # Command Line Arguments for this pipeline look like # # This is a JSON formatted file that contains all the relevant information needed for processing the project # # data and create its trackhub # project_data_file=project_data.json # Command Line Argument keys _CONFIG_COMMAND_LINE_ARGUMENT_KEY_PROJECT_DATA_FILE = 'project_data_file' def __init__(self, configuration_object, configuration_file, pipeline_arguments): super(ConfigManager, self).__init__(configuration_object, configuration_file, pipeline_arguments) # Lazy Process command line arguments self.__pipeline_arguments_object = None self.__running_mode = None def _get_allowed_configuration_keys(self): return {self._CONFIG_COMMAND_LINE_ARGUMENT_KEY_PROJECT_DATA_FILE} def get_project_data_file_path(self): return self._get_value_for_pipeline_argument_key(self._CONFIG_COMMAND_LINE_ARGUMENT_KEY_PROJECT_DATA_FILE) def get_file_path_trackhub_creation_report(self): return os.path.join(config_manager.get_app_config_manager().get_session_working_dir(), "trackhub_creation.report") def get_project_description_url(self): # TODO - This could be made configurable in the future return "docs/index.html" # Models for dealing with the data file that describes the project class 
ProjectTrackDescriptor: """ This class models the tracks that are defined in the given project under the "trackMaps" section """ # Project Data File keys relative to every TrackMap object _PROJECT_DATA_FILE_KEY_TRACK_NAME = 'trackName' _PROJECT_DATA_FILE_KEY_TRACK_SHORT_LABEL = 'trackShortLabel' _PROJECT_DATA_FILE_KEY_TRACK_LONG_LABEL = 'trackLongLabel' _PROJECT_DATA_FILE_KEY_TRACK_SPECIES = 'trackSpecies' _PROJECT_DATA_FILE_KEY_TRACK_POGO_FILE_PATH = 'pogoFile' def __init__(self, project_track_descriptor_object): self.__project_track_descriptor_object = project_track_descriptor_object def _get_value_for_key(self, key, default=""): if self.__project_track_descriptor_object and (key in self.__project_track_descriptor_object): return self.__project_track_descriptor_object[key] return default def get_track_name(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_NAME) def get_track_short_label(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_SHORT_LABEL) def get_track_long_label(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_LONG_LABEL) def get_track_species(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_SPECIES) def get_track_file_path_pogo(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_POGO_FILE_PATH) class ProjectTrackhubDescriptor: """ This class models the trackhub as described by the given project description data, see sample project description information at the top of this module """ # Project Data File keys _PROJECT_DATA_FILE_KEY_TRACKHUB_NAME = 'trackHubName' _PROJECT_DATA_FILE_KEY_TRACKHUB_SHORT_LABEL = 'trackHubShortLabel' _PROJECT_DATA_FILE_KEY_TRACKHUB_LONG_LABEL = 'trackHubLongLabel' _PROJECT_DATA_FILE_KEY_TRACKHUB_HUB_TYPE = 'trackHubType' _PROJECT_DATA_FILE_KEY_TRACKHUB_EMAIL = 'trackHubEmail' _PROJECT_DATA_FILE_KEY_TRACKHUB_INTERNAL_ABSOLUTE_PATH = 'trackHubInternalAbsolutePath' _PROJECT_DATA_FILE_KEY_TRACKHUB_REPORT_FILE = 'trackhubCreationReportFilePath' _PROJECT_DATA_FILE_KEY_TRACKHUB_SECTION_TRACKMAPS = 'trackMaps' def __init__(self, project_data_file_path): self.__project_data_file_path = project_data_file_path self.__project_data_object = None self.__project_tracks_descriptors = None def _get_project_data_object(self): if not self.__project_data_object: self.__project_data_object = general_toolbox.read_json(self.__project_data_file_path) return self.__project_data_object def _get_value_for_key(self, key, default=""): # TODO - I should start thinking about refactoring this out if key in self._get_project_data_object(): return self._get_project_data_object()[key] return default def get_trackhub_name(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_NAME, os.path.basename(self.__project_data_file_path)) def get_trackhub_short_label(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_SHORT_LABEL, "--- NO SHORT LABEL HAS BEEN DEFINED FOR THIS TRACKHUB ---") def get_trackhub_long_label(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_LONG_LABEL, "--- NO LONG LABEL HAS BEEN DEFINED FOR THIS TRACKHUB ---") def get_trackhub_hub_type(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_HUB_TYPE, "PROTEOMICS") def get_trackhub_email(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_EMAIL, "pride-support@ebi.ac.uk") def get_trackhub_destination_path(self): return 
self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_INTERNAL_ABSOLUTE_PATH) def get_trackhub_project_defined_tracks(self): if not self.__project_tracks_descriptors: # Default value is an empty list of tracks self.__project_tracks_descriptors = [] data_file_project_track_description_objects = \ self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_SECTION_TRACKMAPS) if data_file_project_track_description_objects: self.__project_tracks_descriptors = \ [ProjectTrackDescriptor(data_file_project_track_description_object) for data_file_project_track_description_object in data_file_project_track_description_objects] return self.__project_tracks_descriptors def get_trackhub_report_file_path(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_REPORT_FILE) class PipelineResult: """ This class models the pipeline report that will be made available at the end of the pipeline execution """ _VALUE_STATUS_SUCCESS = 'SUCCESS' _VALUE_STATUS_ERROR = 'ERROR' _VALUE_STATUS_WARNING = 'WARNING' def __init__(self): self.status = self._VALUE_STATUS_SUCCESS self.error_messages = [] self.success_messages = [] self.warning_messages = [] self.hub_descriptor_file_path = "" # Absolute file path to the folder that represents the running session of the pipeline self.file_path_pipeline_session = "" # Absolute file path to the log files that belong to the running session of the pipeline self.file_path_log_files = [] # Ensembl Release used for creating the trackhub self.ensembl_release = "" def set_status_error(self): self.status = self._VALUE_STATUS_ERROR def add_error_message(self, error_message): """ Adds an error message to the pipeline report. As this report is the final word on how the pipeline performed, the first error message that is set will set the status of the pipeline as 'failed' :param error_message: error message :return: no return value """ # This is the report on the final result from running the pipeline self.set_status_error() self.error_messages.append(error_message) def add_success_message(self, success_message): """ This will add messages to the pipeline report, but it doesn't change its status. :param success_message: message to add :return: no return value """ self.success_messages.append(success_message) def add_warning_message(self, warning_message): """ This will add warning messages to the pipeline report, setting the status to 'WARNING' if it wasn't in 'ERROR' status. 
:param warning_message: warning message to add :return: no return value """ self.warning_messages.append(warning_message) if self.status != self._VALUE_STATUS_ERROR: self.status = self._VALUE_STATUS_WARNING def add_log_files(self, log_files): """ Add all the log files produced by the pipeline to its final report :param log_files: a list of log files to add :return: no return value """ self.file_path_log_files.extend(log_files) def __str__(self): return json.dumps({'status': self.status, 'success_messages': self.success_messages, 'warning_messages': self.warning_messages, 'error_messages': self.error_messages, 'hub_descriptor_file_path': self.hub_descriptor_file_path, 'ensembl_release': self.ensembl_release, 'pipeline_session_working_dir': self.file_path_pipeline_session, 'log_files': self.file_path_log_files}) class TrackhubCreatorForProject(TrackhubCreationPogoBasedDirector): """ Given a project description file that contains the information specified at the beginning of this module, this pipeline creates a trackhub for all the project-defined tracks """ def __init__(self, configuration_object, configuration_file, pipeline_arguments): runner_id = "{}-{}".format(__name__, time.time()) super().__init__(runner_id) self.__config_manager = ConfigManager(configuration_object, configuration_file, pipeline_arguments) self.__project_trackhub_descriptor = None # Only the valid project tracks will be processed for inclusion in the trackhub self.__valid_project_tracks = None self.__indexed_project_tracks_by_taxonomy_id = None # Pipeline result object self.__pipeline_result_object = PipelineResult() self.__trackhub_descriptor = None self.__trackhub_exporter = None def __get_valid_project_tracks(self): """ This helper creates a list of valid trackhub tracks from the given project, i.e.
tracks that meet this criterion: - Its taxonomy ID is available on Ensembl The list of valid tracks is cached, so it won't change between multiple calls :return: a list of valid trackhub tracks for the given project """ if not self.__valid_project_tracks: self.__valid_project_tracks = [] ensembl_service = ensembl.service.get_service() for project_track_descriptor in self.__project_trackhub_descriptor.get_trackhub_project_defined_tracks(): if ensembl_service.get_species_data_service().get_species_entry_for_taxonomy_id( project_track_descriptor.get_track_species()): self.__valid_project_tracks.append(project_track_descriptor) else: self.__pipeline_result_object \ .add_warning_message("MISSING Taxonomy #{} on Ensembl" .format(project_track_descriptor.get_track_species())) return self.__valid_project_tracks def __get_index_project_track_for_taxonomy_id(self): """ Get the project tracks indexed by taxonomy id :return: map (taxonomy_id, project_track) """ if not self.__indexed_project_tracks_by_taxonomy_id: self.__indexed_project_tracks_by_taxonomy_id = {} self._get_logger().debug("Indexing #{} valid project tracks".format(len(self.__get_valid_project_tracks()))) for project_track in self.__get_valid_project_tracks(): if project_track.get_track_species() in self.__indexed_project_tracks_by_taxonomy_id: self._get_logger() \ .error("ERROR DUPLICATED TAXONOMY indexing project track '{}', " "another project track, '{}' is in the index - SKIP -" .format(project_track.get_track_name(), self.__indexed_project_tracks_by_taxonomy_id[ project_track.get_track_species()].get_track_name())) continue self.__indexed_project_tracks_by_taxonomy_id[project_track.get_track_species()] = project_track self._get_logger().debug("Project track '{}' indexed with taxonomy ID '{}'" .format(project_track.get_track_name(), project_track.get_track_species())) return self.__indexed_project_tracks_by_taxonomy_id def __get_project_track_for_taxonomy_id(self, taxonomy_id): if taxonomy_id in self.__get_index_project_track_for_taxonomy_id(): return self.__get_index_project_track_for_taxonomy_id()[taxonomy_id] # I know, we should never return None return None def _before(self): # Set Pipeline Session working directory self.__pipeline_result_object.file_path_pipeline_session = \ config_manager.get_app_config_manager().get_session_working_dir() # Add this pipeline session's log files to the final report self.__pipeline_result_object.add_log_files(config_manager.get_app_config_manager().get_session_log_files()) # Add information about the Ensembl Release being used self.__pipeline_result_object.ensembl_release = str(ensembl.service.get_service().get_release_number()) if self.__config_manager.get_project_data_file_path(): self._get_logger().info("Reading Project Trackhub Descriptor from file at '{}'" .format(self.__config_manager.get_project_data_file_path())) self.__project_trackhub_descriptor = \ ProjectTrackhubDescriptor(self.__config_manager.get_project_data_file_path()) # Check that the destination folder exists if not os.path.isdir(self.__project_trackhub_descriptor.get_trackhub_destination_path()): error_message = "Trackhub destination path NOT VALID, '{}'" \ .format(self.__project_trackhub_descriptor.get_trackhub_destination_path()) self._get_logger().error(error_message) self.__pipeline_result_object.add_error_message(error_message) self.set_pipeline_status_fail() return False # Check valid project tracks if not self.__get_valid_project_tracks(): # It makes no sense to go ahead if this project has no valid tracks error_message =
"Project Trackhub contains NO VALID TRACKS" self._get_logger().error(error_message) self.__pipeline_result_object.add_error_message(error_message) self.set_pipeline_status_fail() return False return True error_message = "INVALID / MISSING Project Trackhub Descriptor file, '{}'" \ .format(self.__config_manager.get_project_data_file_path()) self._get_logger().error(error_message) self.__pipeline_result_object.add_error_message(error_message) self.set_pipeline_status_fail() return False # Helpers # Override def _get_pogo_results_for_input_data(self): # TODO - Needs to be extended for abstracting from results files from '-mm' parameter use # This is a map (project_track_descriptor, PogoRunResult) pogo_run_results = {} parallel_run_manager = ParallelRunnerManagerFactory.get_parallel_runner_manager() for project_track in self.__get_valid_project_tracks(): pogo_input_file_path = project_track.get_track_file_path_pogo() pogo_protein_sequence_file_path = \ self._get_pogo_protein_sequence_file_path_for_taxonomy(project_track.get_track_species()) pogo_gtf_file_path = self._get_pogo_gtf_file_path_for_taxonomy(project_track.get_track_species()) parallel_run_manager.add_runner(PogoRunnerFactory.get_pogo_runner(project_track.get_track_species(), pogo_input_file_path, pogo_protein_sequence_file_path, pogo_gtf_file_path)) # Run PoGo with '-mm 1' parallel_run_manager.add_runner(PogoRunnerFactory.get_pogo_runner(project_track.get_track_species(), pogo_input_file_path, pogo_protein_sequence_file_path, pogo_gtf_file_path, '1')) self._get_logger().debug("Running PoGo for #{} Project Tracks".format(len(self.__get_valid_project_tracks()))) parallel_run_manager.start_runners() self._get_logger().debug("Processing PoGo runners results") try: while True: pogo_runner = parallel_run_manager.get_next_finished_runner() if not pogo_runner.is_success(): message = "PoGo FAILED running on file '{}', taxonomy #{} - SKIPPING its results" \ .format(pogo_runner.pogo_input_file, pogo_runner.ncbi_taxonomy_id) self._get_logger().error(message) self.__pipeline_result_object.add_warning_message(message) continue if pogo_runner.ncbi_taxonomy_id not in pogo_run_results: pogo_run_results[pogo_runner.ncbi_taxonomy_id] = [] self._get_logger().info("PoGo SUCCESS for taxonomy '{}', input file '{}'" .format(pogo_runner.ncbi_taxonomy_id, pogo_runner.pogo_input_file)) # Every taxonomy now has a list of PoGo run results pogo_run_results[pogo_runner.ncbi_taxonomy_id].append(pogo_runner.get_pogo_run_result()) except NoMoreAliveRunnersException as e: self._get_logger().debug("All PoGo runners results collected!") if len(pogo_run_results) == 0: message = "ALL PoGo files FAILED for this project!!!" 
self._get_logger().error(message) self.__pipeline_result_object.add_error_message(message) self.set_pipeline_status_fail() return pogo_run_results # Override def _get_trackhub_descriptor(self): if not self.__trackhub_descriptor: # TODO - This iteration has no description URL for the project trackhub, we should include it in the project # TODO - input json file the pipeline gets as a parameter self.__trackhub_descriptor = \ trackhubs.TrackHub(self.__project_trackhub_descriptor.get_trackhub_name(), self.__project_trackhub_descriptor.get_trackhub_short_label(), self.__project_trackhub_descriptor.get_trackhub_long_label(), self.__project_trackhub_descriptor.get_trackhub_email(), self.__config_manager.get_project_description_url()) return self.__trackhub_descriptor # Override def _get_trackhub_track_for_taxonomy_id(self, taxonomy_id, pogo_run_result): # Default values trackhub_track_title = "- NOT PROVIDED -" trackhub_track_short_label = "- NOT PROVIDED -" trackhub_track_long_label = "- NOT PROVIDED -" # Fill in the project trackhub track information if found project_track = self.__get_project_track_for_taxonomy_id(taxonomy_id) if project_track: trackhub_track_title = project_track.get_track_name() trackhub_track_short_label = project_track.get_track_short_label() trackhub_track_long_label = project_track.get_track_long_label() trackhub_track_title = "{} {}"\ .format(trackhub_track_title, self._get_trackhub_track_name_modifiers_based_on_pogo_run(pogo_run_result)) return trackhubs.BaseTrack(trackhub_track_title, trackhub_track_short_label, trackhub_track_long_label) # Override def _get_trackhub_exporter(self): if not self.__trackhub_exporter: self._get_logger().info("Default trackhub exporter - 'TrackHubLocalFilesystemExporter'") self.__trackhub_exporter = trackhubs.TrackHubLocalFilesystemExporter() return self.__trackhub_exporter # Override def _prepare_trackhub_destination_folder(self, trackhub_exporter): self._get_logger().info("Trackhub destination folder ---> '{}'" .format(self.__project_trackhub_descriptor.get_trackhub_destination_path())) trackhub_exporter.track_hub_destination_folder = \ self.__project_trackhub_descriptor.get_trackhub_destination_path() def _run_pipeline(self): if not self.is_pipeline_status_ok(): error_message = "--- ABORT Pipeline Execution ---, the previous stage failed" self._get_logger().warning(error_message) self.__pipeline_result_object.add_error_message(error_message) return False # Use default trackhub creation workflow try: self._create_trackhub() except Exception as e: # I know this is too generic but, for this iteration of the software it is completely fine self.__pipeline_result_object.add_error_message(str(e)) self.set_pipeline_status_fail() return False # Fill in the pipeline report self.__pipeline_result_object.hub_descriptor_file_path = \ self._get_trackhub_exporter() \ .export_summary \ .track_hub_descriptor_file_path for message in self._get_trackhub_exporter().export_summary.warnings: self.__pipeline_result_object.add_warning_message(message) for message in self._get_trackhub_exporter().export_summary.errors: self.__pipeline_result_object.add_error_message(message) if self._get_trackhub_exporter().export_summary.errors: self.set_pipeline_status_fail() return True def _after(self): """ Dump to a file the pipeline report :return: no return value """ if not self.is_pipeline_status_ok(): self._get_logger().warning("This Pipeline is finishing with NON-OK status.") report_files = [self.__config_manager.get_file_path_trackhub_creation_report()] if 
self.__project_trackhub_descriptor \ and self.__project_trackhub_descriptor.get_trackhub_report_file_path(): report_files.append(self.__project_trackhub_descriptor.get_trackhub_report_file_path()) for report_file in report_files: self._get_logger().info("Dumping Pipeline Report to '{}'".format(report_file)) with open(report_file, 'w') as f: f.write(str(self.__pipeline_result_object)) return True if __name__ == '__main__': print("ERROR: This script is part of a pipeline collection and it is not meant to be run in stand alone mode")
50.196998
120
0.676808
# # Author    : Manuel Bernal Llinares # Project   : trackhub-creator # Timestamp : 07-09-2017 11:24 # --- # © 2017 Manuel Bernal Llinares <mbdebian@gmail.com> # All rights reserved. # """ This pipeline creates a trackhub for a PRIDE project, based on the information provided via a JSON formatted file, as it can be seen on this sample: { "trackHubName" : "PXD000625", "trackHubShortLabel" : "<a href=\"http://www.ebi.ac.uk/pride/archive/projects/PXD000625\">PXD000625</a> - Hepatoc...", "trackHubLongLabel" : "Experimental design For the label-free ...", "trackHubType" : "PROTEOMICS", "trackHubEmail" : "pride-support@ebi.ac.uk", "trackHubInternalAbsolutePath" : "...", "trackhubCreationReportFilePath": "...", "trackMaps" : [ { "trackName" : "PXD000625_10090_Original", "trackShortLabel" : "<a href=\"http://www.ebi.ac.uk/pride/archive/projects/PXD000625\">PXD000625</a> - Mus musc...", "trackLongLabel" : "Experimental design For the label-free proteome analysis 17 mice were used composed of 5 ...", "trackSpecies" : "10090", "pogoFile" : "..." } ] } """ import os import json import time # App imports import config_manager import ensembl.service import ensembl.data_downloader import trackhub.models as trackhubs import toolbox.general as general_toolbox from parallel.models import ParallelRunnerManagerFactory from parallel.exceptions import NoMoreAliveRunnersException from pogo.models import PogoRunnerFactory from pipelines.template_pipeline import TrackhubCreationPogoBasedDirector, DirectorConfigurationManager # Globals __configuration_file = None __pipeline_arguments = None __pipeline_director = None # Pipeline properties access def set_configuration_file(config_file): global __configuration_file if __configuration_file is None: __configuration_file = config_file return __configuration_file def set_pipeline_arguments(pipeline_arguments): global __pipeline_arguments if __pipeline_arguments is None: __pipeline_arguments = pipeline_arguments return __pipeline_arguments def get_pipeline_director(): global __pipeline_director if __pipeline_director is None: __pipeline_director = TrackhubCreatorForProject(config_manager.read_config_from_file(__configuration_file), __configuration_file, __pipeline_arguments) return __pipeline_director class ConfigManager(DirectorConfigurationManager): # Command Line Arguments for this pipeline look like # # This is a JSON formatted file that contains all the relevant information needed for processing the project # # data and create its trackhub # project_data_file=project_data.json # Command Line Argument keys _CONFIG_COMMAND_LINE_ARGUMENT_KEY_PROJECT_DATA_FILE = 'project_data_file' def __init__(self, configuration_object, configuration_file, pipeline_arguments): super(ConfigManager, self).__init__(configuration_object, configuration_file, pipeline_arguments) # Lazy Process command line arguments self.__pipeline_arguments_object = None self.__running_mode = None def _get_allowed_configuration_keys(self): return {self._CONFIG_COMMAND_LINE_ARGUMENT_KEY_PROJECT_DATA_FILE} def get_project_data_file_path(self): return self._get_value_for_pipeline_argument_key(self._CONFIG_COMMAND_LINE_ARGUMENT_KEY_PROJECT_DATA_FILE) def get_file_path_trackhub_creation_report(self): return os.path.join(config_manager.get_app_config_manager().get_session_working_dir(), "trackhub_creation.report") def get_project_description_url(self): # TODO - This could be made configurable in the future return "docs/index.html" # Models for dealing with the data file that describes the project class 
ProjectTrackDescriptor: """ This class models the tracks that are defined in the given project under the "trackMaps" section """ # Project Data File keys relative to every TrackMap object _PROJECT_DATA_FILE_KEY_TRACK_NAME = 'trackName' _PROJECT_DATA_FILE_KEY_TRACK_SHORT_LABEL = 'trackShortLabel' _PROJECT_DATA_FILE_KEY_TRACK_LONG_LABEL = 'trackLongLabel' _PROJECT_DATA_FILE_KEY_TRACK_SPECIES = 'trackSpecies' _PROJECT_DATA_FILE_KEY_TRACK_POGO_FILE_PATH = 'pogoFile' def __init__(self, project_track_descriptor_object): self.__project_track_descriptor_object = project_track_descriptor_object def _get_value_for_key(self, key, default=""): if self.__project_track_descriptor_object and (key in self.__project_track_descriptor_object): return self.__project_track_descriptor_object[key] return default def get_track_name(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_NAME) def get_track_short_label(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_SHORT_LABEL) def get_track_long_label(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_LONG_LABEL) def get_track_species(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_SPECIES) def get_track_file_path_pogo(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACK_POGO_FILE_PATH) class ProjectTrackhubDescriptor: """ This class models the trackhub as described by the given project description data, see sample project description information at the top of this module """ # Project Data File keys _PROJECT_DATA_FILE_KEY_TRACKHUB_NAME = 'trackHubName' _PROJECT_DATA_FILE_KEY_TRACKHUB_SHORT_LABEL = 'trackHubShortLabel' _PROJECT_DATA_FILE_KEY_TRACKHUB_LONG_LABEL = 'trackHubLongLabel' _PROJECT_DATA_FILE_KEY_TRACKHUB_HUB_TYPE = 'trackHubType' _PROJECT_DATA_FILE_KEY_TRACKHUB_EMAIL = 'trackHubEmail' _PROJECT_DATA_FILE_KEY_TRACKHUB_INTERNAL_ABSOLUTE_PATH = 'trackHubInternalAbsolutePath' _PROJECT_DATA_FILE_KEY_TRACKHUB_REPORT_FILE = 'trackhubCreationReportFilePath' _PROJECT_DATA_FILE_KEY_TRACKHUB_SECTION_TRACKMAPS = 'trackMaps' def __init__(self, project_data_file_path): self.__project_data_file_path = project_data_file_path self.__project_data_object = None self.__project_tracks_descriptors = None def _get_project_data_object(self): if not self.__project_data_object: self.__project_data_object = general_toolbox.read_json(self.__project_data_file_path) return self.__project_data_object def _get_value_for_key(self, key, default=""): # TODO - I should start thinking about refactoring this out if key in self._get_project_data_object(): return self._get_project_data_object()[key] return default def get_trackhub_name(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_NAME, os.path.basename(self.__project_data_file_path)) def get_trackhub_short_label(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_SHORT_LABEL, "--- NO SHORT LABEL HAS BEEN DEFINED FOR THIS TRACKHUB ---") def get_trackhub_long_label(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_LONG_LABEL, "--- NO LONG LABEL HAS BEEN DEFINED FOR THIS TRACKHUB ---") def get_trackhub_hub_type(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_HUB_TYPE, "PROTEOMICS") def get_trackhub_email(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_EMAIL, "pride-support@ebi.ac.uk") def get_trackhub_destination_path(self): return 
self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_INTERNAL_ABSOLUTE_PATH) def get_trackhub_project_defined_tracks(self): if not self.__project_tracks_descriptors: # Default value is an empty list of tracks self.__project_tracks_descriptors = [] data_file_project_track_description_objects = \ self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_SECTION_TRACKMAPS) if data_file_project_track_description_objects: self.__project_tracks_descriptors = \ [ProjectTrackDescriptor(data_file_project_track_description_object) for data_file_project_track_description_object in data_file_project_track_description_objects] return self.__project_tracks_descriptors def get_trackhub_report_file_path(self): return self._get_value_for_key(self._PROJECT_DATA_FILE_KEY_TRACKHUB_REPORT_FILE) class PipelineResult: """ This class models the pipeline report that will be made available at the end of the pipeline execution """ _VALUE_STATUS_SUCCESS = 'SUCCESS' _VALUE_STATUS_ERROR = 'ERROR' _VALUE_STATUS_WARNING = 'WARNING' def __init__(self): self.status = self._VALUE_STATUS_SUCCESS self.error_messages = [] self.success_messages = [] self.warning_messages = [] self.hub_descriptor_file_path = "" # Absolute file path to the folder that represents the running session of the pipeline self.file_path_pipeline_session = "" # Absolute file path to the log files that belong to the running session of the pipeline self.file_path_log_files = [] # Ensembl Release used for creating the trackhub self.ensembl_release = "" def set_status_error(self): self.status = self._VALUE_STATUS_ERROR def add_error_message(self, error_message): """ Adds an error message to the pipeline report. As this report is the final word on how the pipeline performed, the first error message that is set will set the status of the pipeline as 'failed' :param error_message: error message :return: no return value """ # This is the report on the final result from running the pipeline self.set_status_error() self.error_messages.append(error_message) def add_success_message(self, success_message): """ This will add messages to the pipeline report, but it doesn't change its status. :param success_message: message to add :return: no return value """ self.success_messages.append(success_message) def add_warning_message(self, warning_message): """ This will add warning messages to the pipeline report, setting the status to 'WARNING' if it wasn't in 'ERROR' status. 
:param warning_message: warning message to add :return: no return value """ self.warning_messages.append(warning_message) if self.status != self._VALUE_STATUS_ERROR: self.status = self._VALUE_STATUS_WARNING def add_log_files(self, log_files): """ Add all the log files produced by the pipeline to its final report :param log_files: a list of log files to add :return: no return value """ self.file_path_log_files.extend(log_files) def __str__(self): return json.dumps({'status': self.status, 'success_messages': self.success_messages, 'warning_messages': self.warning_messages, 'error_messages': self.error_messages, 'hub_descriptor_file_path': self.hub_descriptor_file_path, 'ensembl_release': self.ensembl_release, 'pipeline_session_working_dir': self.file_path_pipeline_session, 'log_files': self.file_path_log_files}) class TrackhubCreatorForProject(TrackhubCreationPogoBasedDirector): """ Given a project description file that contains the information specified at the beginning of this module, this pipeline creates a trackhub for all the project-defined tracks """ def __init__(self, configuration_object, configuration_file, pipeline_arguments): runner_id = "{}-{}".format(__name__, time.time()) super().__init__(runner_id) self.__config_manager = ConfigManager(configuration_object, configuration_file, pipeline_arguments) self.__project_trackhub_descriptor = None # Only the valid project tracks will be processed for inclusion in the trackhub self.__valid_project_tracks = None self.__indexed_project_tracks_by_taxonomy_id = None # Pipeline result object self.__pipeline_result_object = PipelineResult() self.__trackhub_descriptor = None self.__trackhub_exporter = None def __get_valid_project_tracks(self): """ This helper creates a list of valid trackhub tracks from the given project, i.e.
tracks that meet this criterion: - Its taxonomy ID is available on Ensembl The list of valid tracks is cached, so it won't change between multiple calls :return: a list of valid trackhub tracks for the given project """ if not self.__valid_project_tracks: self.__valid_project_tracks = [] ensembl_service = ensembl.service.get_service() for project_track_descriptor in self.__project_trackhub_descriptor.get_trackhub_project_defined_tracks(): if ensembl_service.get_species_data_service().get_species_entry_for_taxonomy_id( project_track_descriptor.get_track_species()): self.__valid_project_tracks.append(project_track_descriptor) else: self.__pipeline_result_object \ .add_warning_message("MISSING Taxonomy #{} on Ensembl" .format(project_track_descriptor.get_track_species())) return self.__valid_project_tracks def __get_index_project_track_for_taxonomy_id(self): """ Get the project tracks indexed by taxonomy id :return: map (taxonomy_id, project_track) """ if not self.__indexed_project_tracks_by_taxonomy_id: self.__indexed_project_tracks_by_taxonomy_id = {} self._get_logger().debug("Indexing #{} valid project tracks".format(len(self.__get_valid_project_tracks()))) for project_track in self.__get_valid_project_tracks(): if project_track.get_track_species() in self.__indexed_project_tracks_by_taxonomy_id: self._get_logger() \ .error("ERROR DUPLICATED TAXONOMY indexing project track '{}', " "another project track, '{}' is in the index - SKIP -" .format(project_track.get_track_name(), self.__indexed_project_tracks_by_taxonomy_id[ project_track.get_track_species()].get_track_name())) continue self.__indexed_project_tracks_by_taxonomy_id[project_track.get_track_species()] = project_track self._get_logger().debug("Project track '{}' indexed with taxonomy ID '{}'" .format(project_track.get_track_name(), project_track.get_track_species())) return self.__indexed_project_tracks_by_taxonomy_id def __get_project_track_for_taxonomy_id(self, taxonomy_id): if taxonomy_id in self.__get_index_project_track_for_taxonomy_id(): return self.__get_index_project_track_for_taxonomy_id()[taxonomy_id] # I know, we should never return None return None def _before(self): # Set Pipeline Session working directory self.__pipeline_result_object.file_path_pipeline_session = \ config_manager.get_app_config_manager().get_session_working_dir() # Add this pipeline session's log files to the final report self.__pipeline_result_object.add_log_files(config_manager.get_app_config_manager().get_session_log_files()) # Add information about the Ensembl Release being used self.__pipeline_result_object.ensembl_release = str(ensembl.service.get_service().get_release_number()) if self.__config_manager.get_project_data_file_path(): self._get_logger().info("Reading Project Trackhub Descriptor from file at '{}'" .format(self.__config_manager.get_project_data_file_path())) self.__project_trackhub_descriptor = \ ProjectTrackhubDescriptor(self.__config_manager.get_project_data_file_path()) # Check that the destination folder exists if not os.path.isdir(self.__project_trackhub_descriptor.get_trackhub_destination_path()): error_message = "Trackhub destination path NOT VALID, '{}'" \ .format(self.__project_trackhub_descriptor.get_trackhub_destination_path()) self._get_logger().error(error_message) self.__pipeline_result_object.add_error_message(error_message) self.set_pipeline_status_fail() return False # Check valid project tracks if not self.__get_valid_project_tracks(): # It makes no sense to go ahead if this project has no valid tracks error_message =
"Project Trackhub contains NO VALID TRACKS" self._get_logger().error(error_message) self.__pipeline_result_object.add_error_message(error_message) self.set_pipeline_status_fail() return False return True error_message = "INVALID / MISSING Project Trackhub Descriptor file, '{}'" \ .format(self.__config_manager.get_project_data_file_path()) self._get_logger().error(error_message) self.__pipeline_result_object.add_error_message(error_message) self.set_pipeline_status_fail() return False # Helpers # Override def _get_pogo_results_for_input_data(self): # TODO - Needs to be extended for abstracting from results files from '-mm' parameter use # This is a map (project_track_descriptor, PogoRunResult) pogo_run_results = {} parallel_run_manager = ParallelRunnerManagerFactory.get_parallel_runner_manager() for project_track in self.__get_valid_project_tracks(): pogo_input_file_path = project_track.get_track_file_path_pogo() pogo_protein_sequence_file_path = \ self._get_pogo_protein_sequence_file_path_for_taxonomy(project_track.get_track_species()) pogo_gtf_file_path = self._get_pogo_gtf_file_path_for_taxonomy(project_track.get_track_species()) parallel_run_manager.add_runner(PogoRunnerFactory.get_pogo_runner(project_track.get_track_species(), pogo_input_file_path, pogo_protein_sequence_file_path, pogo_gtf_file_path)) # Run PoGo with '-mm 1' parallel_run_manager.add_runner(PogoRunnerFactory.get_pogo_runner(project_track.get_track_species(), pogo_input_file_path, pogo_protein_sequence_file_path, pogo_gtf_file_path, '1')) self._get_logger().debug("Running PoGo for #{} Project Tracks".format(len(self.__get_valid_project_tracks()))) parallel_run_manager.start_runners() self._get_logger().debug("Processing PoGo runners results") try: while True: pogo_runner = parallel_run_manager.get_next_finished_runner() if not pogo_runner.is_success(): message = "PoGo FAILED running on file '{}', taxonomy #{} - SKIPPING its results" \ .format(pogo_runner.pogo_input_file, pogo_runner.ncbi_taxonomy_id) self._get_logger().error(message) self.__pipeline_result_object.add_warning_message(message) continue if pogo_runner.ncbi_taxonomy_id not in pogo_run_results: pogo_run_results[pogo_runner.ncbi_taxonomy_id] = [] self._get_logger().info("PoGo SUCCESS for taxonomy '{}', input file '{}'" .format(pogo_runner.ncbi_taxonomy_id, pogo_runner.pogo_input_file)) # Every taxonomy now has a list of PoGo run results pogo_run_results[pogo_runner.ncbi_taxonomy_id].append(pogo_runner.get_pogo_run_result()) except NoMoreAliveRunnersException as e: self._get_logger().debug("All PoGo runners results collected!") if len(pogo_run_results) == 0: message = "ALL PoGo files FAILED for this project!!!" 
self._get_logger().error(message) self.__pipeline_result_object.add_error_message(message) self.set_pipeline_status_fail() return pogo_run_results # Override def _get_trackhub_descriptor(self): if not self.__trackhub_descriptor: # TODO - This iteration has no description URL for the project trackhub, we should include it in the project # TODO - input json file the pipeline gets as a parameter self.__trackhub_descriptor = \ trackhubs.TrackHub(self.__project_trackhub_descriptor.get_trackhub_name(), self.__project_trackhub_descriptor.get_trackhub_short_label(), self.__project_trackhub_descriptor.get_trackhub_long_label(), self.__project_trackhub_descriptor.get_trackhub_email(), self.__config_manager.get_project_description_url()) return self.__trackhub_descriptor # Override def _get_trackhub_track_for_taxonomy_id(self, taxonomy_id, pogo_run_result): # Default values trackhub_track_title = "- NOT PROVIDED -" trackhub_track_short_label = "- NOT PROVIDED -" trackhub_track_long_label = "- NOT PROVIDED -" # Fill in the project trackhub track information if found project_track = self.__get_project_track_for_taxonomy_id(taxonomy_id) if project_track: trackhub_track_title = project_track.get_track_name() trackhub_track_short_label = project_track.get_track_short_label() trackhub_track_long_label = project_track.get_track_long_label() trackhub_track_title = "{} {}"\ .format(trackhub_track_title, self._get_trackhub_track_name_modifiers_based_on_pogo_run(pogo_run_result)) return trackhubs.BaseTrack(trackhub_track_title, trackhub_track_short_label, trackhub_track_long_label) # Override def _get_trackhub_exporter(self): if not self.__trackhub_exporter: self._get_logger().info("Default trackhub exporter - 'TrackHubLocalFilesystemExporter'") self.__trackhub_exporter = trackhubs.TrackHubLocalFilesystemExporter() return self.__trackhub_exporter # Override def _prepare_trackhub_destination_folder(self, trackhub_exporter): self._get_logger().info("Trackhub destination folder ---> '{}'" .format(self.__project_trackhub_descriptor.get_trackhub_destination_path())) trackhub_exporter.track_hub_destination_folder = \ self.__project_trackhub_descriptor.get_trackhub_destination_path() def _run_pipeline(self): if not self.is_pipeline_status_ok(): error_message = "--- ABORT Pipeline Execution ---, the previous stage failed" self._get_logger().warning(error_message) self.__pipeline_result_object.add_error_message(error_message) return False # Use default trackhub creation workflow try: self._create_trackhub() except Exception as e: # I know this is too generic but, for this iteration of the software it is completely fine self.__pipeline_result_object.add_error_message(str(e)) self.set_pipeline_status_fail() return False # Fill in the pipeline report self.__pipeline_result_object.hub_descriptor_file_path = \ self._get_trackhub_exporter() \ .export_summary \ .track_hub_descriptor_file_path for message in self._get_trackhub_exporter().export_summary.warnings: self.__pipeline_result_object.add_warning_message(message) for message in self._get_trackhub_exporter().export_summary.errors: self.__pipeline_result_object.add_error_message(message) if self._get_trackhub_exporter().export_summary.errors: self.set_pipeline_status_fail() return True def _after(self): """ Dump to a file the pipeline report :return: no return value """ if not self.is_pipeline_status_ok(): self._get_logger().warning("This Pipeline is finishing with NON-OK status.") report_files = [self.__config_manager.get_file_path_trackhub_creation_report()] if 
self.__project_trackhub_descriptor \ and self.__project_trackhub_descriptor.get_trackhub_report_file_path(): report_files.append(self.__project_trackhub_descriptor.get_trackhub_report_file_path()) for report_file in report_files: self._get_logger().info("Dumping Pipeline Report to '{}'".format(report_file)) with open(report_file, 'w') as f: f.write(str(self.__pipeline_result_object)) return True if __name__ == '__main__': print("ERROR: This script is part of a pipeline collection and it is not meant to be run in stand alone mode")
16
0
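The record above embeds the trackhub-creator pipeline, whose module docstring documents the JSON project-descriptor format (trackHubName, trackHubInternalAbsolutePath, trackMaps with trackName and trackSpecies, and so on). As a minimal sketch of consuming that format, the hypothetical helper below, which is not part of the pipeline itself, loads such a descriptor with the standard json module and lists the tracks it defines; the helper name and the file path in the usage comment are assumptions.

import json

def summarize_project_descriptor(path):
    # Load a project descriptor like the sample JSON in the module docstring
    # and report the trackhub name, destination, and defined tracks.
    with open(path) as f:
        descriptor = json.load(f)
    print("Trackhub:", descriptor.get("trackHubName", "<unnamed>"))
    print("Destination:", descriptor.get("trackHubInternalAbsolutePath", "<unset>"))
    for track in descriptor.get("trackMaps", []):
        print(" - track '{}' (taxonomy {})".format(
            track.get("trackName"), track.get("trackSpecies")))

# Example usage (hypothetical file name):
# summarize_project_descriptor("project_data.json")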
b76df0159986f1a2e79043c17c75ba6fb06ea156
8,516
py
Python
tests/extractors/test_protein.py
KalinNonchev/kipoiseq
38d1134885e401198acd3883286dc55627cf12a6
[ "MIT" ]
2
2019-12-16T17:13:04.000Z
2021-07-29T12:05:47.000Z
tests/extractors/test_protein.py
KalinNonchev/kipoiseq
38d1134885e401198acd3883286dc55627cf12a6
[ "MIT" ]
117
2020-04-22T12:46:45.000Z
2021-08-02T04:40:58.000Z
tests/extractors/test_protein.py
KalinNonchev/kipoiseq
38d1134885e401198acd3883286dc55627cf12a6
[ "MIT" ]
null
null
null
import pytest from pytest_mock import mocker import pandas as pd from kipoiseq.transforms.functional import translate, rc_dna from kipoiseq.dataclasses import Interval, Variant from kipoiseq.extractors.protein import cut_transcript_seq, gtf_row2interval, \ CDSFetcher, TranscriptSeqExtractor, ProteinSeqExtractor, \ ProteinVCFSeqExtractor, SingleSeqProteinVCFSeqExtractor, \ SingleVariantProteinVCFSeqExtractor gtf_file = 'tests/data/sample_1_protein.gtf' fasta_file = 'tests/data/demo_dna_seq.fa' transcript_id = 'enst_test1' vcf_file = 'tests/data/singleVar_vcf_enst_test2.vcf.gz' intervals = [ Interval('22', 580, 596, strand='+', attrs={'tag': 'cds_end_NF'}), Interval('22', 597, 610, strand='+', attrs={'tag': 'cds_end_NF'}) ] def test_cut_seq(): seq = 'ATCGATG' seq = cut_transcript_seq(seq, 'cds_end_NF') assert len(seq) == 6 seq = 'ATCGATG' seq = cut_transcript_seq(seq, 'cds_end_NF,cds_start_NF') assert len(seq) == 3 seq = 'ATCGATG' seq = cut_transcript_seq(seq, 'cds_start_NF') assert len(seq) == 9 seq = 'ATCGATG' seq = cut_transcript_seq(seq, 'no_tag') assert len(seq) == 3 def test_gtf_row2interval(): row = pd.Series({ 'Chromosome': '22', 'Start': 10, 'End': 20, 'Strand': '-', 'tag': 'cds_end_NF' }) expected_interval = Interval(chrom='22', start=10, end=20, name='', strand='-', attrs={'tag': 'cds_end_NF'}) assert gtf_row2interval(row) == expected_interval def test_CDSFetcher__read_cds(): cds = CDSFetcher._read_cds(gtf_file, duplicate_attr=True) assert cds.shape[0] == 7 assert cds.iloc[0].Chromosome == '22' assert cds.iloc[0].Start == 598 assert cds.iloc[0].End == 3050 assert cds.iloc[3].Start == 3 assert cds.iloc[3].End == 300 @pytest.fixture def cds_fetcher(): return CDSFetcher(gtf_file) def test_CDSFetcher__len__(cds_fetcher): assert len(cds_fetcher) == 3 def test_CDSFetcher_get_cds(cds_fetcher): intervals = cds_fetcher.get_cds(transcript_id) assert intervals[0] == Interval(chrom='22', start=598, end=3196, name='', strand='+') # TODO: Improve test case by adding a transcript with 2 CDS @pytest.fixture def transcript_seq_extractor(): return TranscriptSeqExtractor(gtf_file, fasta_file) def test_get_protein_seq(transcript_seq_extractor): transcript_id = 'enst_test2' seq = transcript_seq_extractor.get_protein_seq(transcript_id) txt_file = 'tests/data/Output_singleSeq_vcf_enst_test2.txt' expected_seq = open(txt_file).readline() assert seq[1:] == expected_seq[1:] # no expected mutation here def test_TranscriptSeqExtractor_prepare_seq(): seqs = ['ATCGATG'] assert 'ATCGAT' == TranscriptSeqExtractor._prepare_seq( seqs, '+', 'cds_end_NF') assert 'CATCGA' == TranscriptSeqExtractor._prepare_seq( seqs, '-', 'cds_end_NF') def test_TranscriptSeqExtractor_get_seq(transcript_seq_extractor): seq = transcript_seq_extractor.get_seq(transcript_id) assert len(seq) == 3196 - 598 def test_TranscriptSeqExtractor_get_item(transcript_seq_extractor): assert transcript_seq_extractor[0] == transcript_seq_extractor.get_seq( transcript_id) @pytest.fixture def protein_seq_extractor(): return ProteinSeqExtractor(gtf_file, fasta_file) def test_ProteinSeqExtractor_prepare_seq(protein_seq_extractor): seqs = ['ATCGATG'] pro_seq = protein_seq_extractor._prepare_seq(seqs, '+', 'cds_end_NF') assert pro_seq == 'ID' pro_seq = protein_seq_extractor._prepare_seq(seqs, '-', 'cds_end_NF') assert pro_seq == 'HR' def test_ProteinVCFSeqExtractor__unstrand(): unstrand_intervals = ProteinVCFSeqExtractor._unstrand(intervals) assert all(i.strand == '.'
for i in unstrand_intervals) # TODO: write test for with sample_id @pytest.fixture def protein_vcf_seq(mocker): extractor = ProteinVCFSeqExtractor(gtf_file, fasta_file, vcf_file) extractor.extract_query = mocker.MagicMock( return_value=iter((['ATC', 'GATG'], ['CATC', 'GAT']))) return extractor def test_ProteinVCFSeqExtractor_extract_cds(protein_vcf_seq): protein_seqs = list(protein_vcf_seq.extract_cds(intervals)) assert protein_seqs[0] == 'ID' assert protein_seqs[1] == 'HR' query = list(protein_vcf_seq.extract_query .call_args[0][0].variant_intervals) variants = list(query[0][0]) assert len(variants) == 1 assert variants[0].pos == 596 interval = query[0][1] assert interval.start == 580 variants = list(query[1][0]) assert len(variants) == 1 assert variants[0].pos == 598 interval = query[1][1] assert interval.start == 597 def test_ProteinVCFSeqExtractor_extract(protein_vcf_seq): transcript_id = 'enst_test2' protein_seqs = list(protein_vcf_seq.extract(transcript_id)) assert protein_seqs[0] == 'HR' assert protein_seqs[1] == 'ID' @pytest.fixture def single_seq_protein(): vcf_file = 'tests/data/singleVar_vcf_enst_test2.vcf.gz' return SingleSeqProteinVCFSeqExtractor(gtf_file, fasta_file, vcf_file) def test_SingleSeqProteinVCFSeqExtractor_extract(single_seq_protein, transcript_seq_extractor): transcript_id = 'enst_test2' seq = single_seq_protein.extract(transcript_id) txt_file = 'tests/data/Output_singleSeq_vcf_enst_test2.txt' expected_seq = open(txt_file).readline() assert seq == expected_seq vcf_file = 'tests/data/singleVar_vcf_enst_test1_diff_type_of_variants.vcf.gz' transcript_id = 'enst_test1' single_seq_protein = SingleSeqProteinVCFSeqExtractor( gtf_file, fasta_file, vcf_file) seq = single_seq_protein.extract(transcript_id) ref_seq = transcript_seq_extractor.get_protein_seq(transcript_id) assert len(seq) == len(ref_seq) count = diff_between_two_seq(seq, ref_seq) assert count == 1, 'Expected diff of 1 AA, but it was: '+str(count) vcf_file = 'tests/data/singleSeq_vcf_enst_test2.vcf.gz' single_seq_protein = SingleSeqProteinVCFSeqExtractor( gtf_file, fasta_file, vcf_file) seq = list(single_seq_protein.extract_all()) assert len(seq) == 0 @pytest.fixture def single_variant_seq(): vcf_file = 'tests/data/singleVar_vcf_enst_test2.vcf.gz' return SingleVariantProteinVCFSeqExtractor(gtf_file, fasta_file, vcf_file) def diff_between_two_seq(seq1, seq2): count = 0 for i in range(len(seq1)): if seq1[i] != seq2[i]: count += 1 return count def test_SingleVariantProteinVCFSeqExtractor_extract(single_variant_seq, transcript_seq_extractor): transcript_id = 'enst_test2' seqs = list(single_variant_seq.extract(transcript_id)) txt_file = 'tests/data/Output_singleVar_vcf_enst_test2.txt' expected_seq = open(txt_file).read().splitlines() assert seqs[0] == expected_seq[0] assert seqs[1] == expected_seq[1] assert seqs[2] == expected_seq[2] seqs = list(single_variant_seq.extract_all()) counter = 0 for tr_id, t_id_seqs in seqs: t_id_seqs = list(t_id_seqs) counter += len(t_id_seqs) for i, seq in enumerate(t_id_seqs): assert seq == expected_seq[i] assert tr_id == 'enst_test2' assert counter == 3, 'Number of variants in vcf 3, but # of seq was: ' + \ str(counter) transcript_id = ['enst_test2', 'enst_test1'] seqs = single_variant_seq.extract_list(transcript_id) for tr_id, t_id_seqs in seqs: assert tr_id in ['enst_test2', 'enst_test1'], tr_id vcf_file = 'tests/data/singleVar_vcf_enst_test1_diff_type_of_variants.vcf.gz' transcript_id = 'enst_test1' single_var_protein = SingleVariantProteinVCFSeqExtractor( gtf_file, fasta_file, 
vcf_file) seqs = list(single_var_protein.extract(transcript_id)) ref_seq = transcript_seq_extractor.get_protein_seq(transcript_id) assert len(seqs) == 1 for seq in seqs: assert len(seq) == len(ref_seq) count = diff_between_two_seq(seq, ref_seq) assert count == 1, 'Expected diff of 1 AA, but it was: '+str(count) vcf_file = 'tests/data/singleSeq_vcf_enst_test2.vcf.gz' single_var_protein = SingleVariantProteinVCFSeqExtractor( gtf_file, fasta_file, vcf_file) length = 0 seqs = list(single_var_protein.extract_all()) for t_id in seqs: length = len(list(t_id)) assert length == 0 # TODO: add for all proteins.pep.all.fa
31.308824
99
0.705965
import pytest from pytest_mock import mocker import pandas as pd from kipoiseq.transforms.functional import translate, rc_dna from kipoiseq.dataclasses import Interval, Variant from kipoiseq.extractors.protein import cut_transcript_seq, gtf_row2interval, \ CDSFetcher, TranscriptSeqExtractor, ProteinSeqExtractor, \ ProteinVCFSeqExtractor, SingleSeqProteinVCFSeqExtractor, \ SingleVariantProteinVCFSeqExtractor gtf_file = 'tests/data/sample_1_protein.gtf' fasta_file = 'tests/data/demo_dna_seq.fa' transcript_id = 'enst_test1' vcf_file = 'tests/data/singleVar_vcf_enst_test2.vcf.gz' intervals = [ Interval('22', 580, 596, strand='+', attrs={'tag': 'cds_end_NF'}), Interval('22', 597, 610, strand='+', attrs={'tag': 'cds_end_NF'}) ] def test_cut_seq(): seq = 'ATCGATG' seq = cut_transcript_seq(seq, 'cds_end_NF') assert len(seq) == 6 seq = 'ATCGATG' seq = cut_transcript_seq(seq, 'cds_end_NF,cds_start_NF') assert len(seq) == 3 seq = 'ATCGATG' seq = cut_transcript_seq(seq, 'cds_start_NF') assert len(seq) == 9 seq = 'ATCGATG' seq = cut_transcript_seq(seq, 'no_tag') assert len(seq) == 3 def test_gtf_row2interval(): row = pd.Series({ 'Chromosome': '22', 'Start': 10, 'End': 20, 'Strand': '-', 'tag': 'cds_end_NF' }) expected_interval = Interval(chrom='22', start=10, end=20, name='', strand='-', attrs={'tag': 'cds_end_NF'}) assert gtf_row2interval(row) == expected_interval def test_CDSFetcher__read_cds(): cds = CDSFetcher._read_cds(gtf_file, duplicate_attr=True) assert cds.shape[0] == 7 assert cds.iloc[0].Chromosome == '22' assert cds.iloc[0].Start == 598 assert cds.iloc[0].End == 3050 assert cds.iloc[3].Start == 3 assert cds.iloc[3].End == 300 @pytest.fixture def cds_fetcher(): return CDSFetcher(gtf_file) def test_CDSFetcher__len__(cds_fetcher): assert len(cds_fetcher) == 3 def test_CDSFetcher_get_cds(cds_fetcher): intervals = cds_fetcher.get_cds(transcript_id) assert intervals[0] == Interval(chrom='22', start=598, end=3196, name='', strand='+') # TODO: Improve test case by adding a transcript with 2 CDS @pytest.fixture def transcript_seq_extractor(): return TranscriptSeqExtractor(gtf_file, fasta_file) def test_get_protein_seq(transcript_seq_extractor): transcript_id = 'enst_test2' seq = transcript_seq_extractor.get_protein_seq(transcript_id) txt_file = 'tests/data/Output_singleSeq_vcf_enst_test2.txt' expected_seq = open(txt_file).readline() assert seq[1:] == expected_seq[1:] # no expected mutation here def test_TranscriptSeqExtractor_prepare_seq(): seqs = ['ATCGATG'] assert 'ATCGAT' == TranscriptSeqExtractor._prepare_seq( seqs, '+', 'cds_end_NF') assert 'CATCGA' == TranscriptSeqExtractor._prepare_seq( seqs, '-', 'cds_end_NF') def test_TranscriptSeqExtractor_get_seq(transcript_seq_extractor): seq = transcript_seq_extractor.get_seq(transcript_id) assert len(seq) == 3196 - 598 def test_TranscriptSeqExtractor_get_item(transcript_seq_extractor): assert transcript_seq_extractor[0] == transcript_seq_extractor.get_seq( transcript_id) @pytest.fixture def protein_seq_extractor(): return ProteinSeqExtractor(gtf_file, fasta_file) def test_ProteinSeqExtractor_prepare_seq(protein_seq_extractor): seqs = ['ATCGATG'] pro_seq = protein_seq_extractor._prepare_seq(seqs, '+', 'cds_end_NF') assert pro_seq == 'ID' pro_seq = protein_seq_extractor._prepare_seq(seqs, '-', 'cds_end_NF') assert pro_seq == 'HR' def test_ProteinVCFSeqExtractor__unstrand(): unstrand_intervals = ProteinVCFSeqExtractor._unstrand(intervals) assert all(i.strand == '.'
for i in unstrand_intervals) # TODO: write test for with sample_id @pytest.fixture def protein_vcf_seq(mocker): extractor = ProteinVCFSeqExtractor(gtf_file, fasta_file, vcf_file) extractor.extract_query = mocker.MagicMock( return_value=iter((['ATC', 'GATG'], ['CATC', 'GAT']))) return extractor def test_ProteinVCFSeqExtractor_extract_cds(protein_vcf_seq): protein_seqs = list(protein_vcf_seq.extract_cds(intervals)) assert protein_seqs[0] == 'ID' assert protein_seqs[1] == 'HR' query = list(protein_vcf_seq.extract_query .call_args[0][0].variant_intervals) variants = list(query[0][0]) assert len(variants) == 1 assert variants[0].pos == 596 interval = query[0][1] assert interval.start == 580 variants = list(query[1][0]) assert len(variants) == 1 assert variants[0].pos == 598 interval = query[1][1] assert interval.start == 597 def test_ProteinVCFSeqExtractor_extract(protein_vcf_seq): transcript_id = 'enst_test2' protein_seqs = list(protein_vcf_seq.extract(transcript_id)) assert protein_seqs[0] == 'HR' assert protein_seqs[1] == 'ID' @pytest.fixture def single_seq_protein(): vcf_file = 'tests/data/singleVar_vcf_enst_test2.vcf.gz' return SingleSeqProteinVCFSeqExtractor(gtf_file, fasta_file, vcf_file) def test_SingleSeqProteinVCFSeqExtractor_extract(single_seq_protein, transcript_seq_extractor): transcript_id = 'enst_test2' seq = single_seq_protein.extract(transcript_id) txt_file = 'tests/data/Output_singleSeq_vcf_enst_test2.txt' expected_seq = open(txt_file).readline() assert seq == expected_seq vcf_file = 'tests/data/singleVar_vcf_enst_test1_diff_type_of_variants.vcf.gz' transcript_id = 'enst_test1' single_seq_protein = SingleSeqProteinVCFSeqExtractor( gtf_file, fasta_file, vcf_file) seq = single_seq_protein.extract(transcript_id) ref_seq = transcript_seq_extractor.get_protein_seq(transcript_id) assert len(seq) == len(ref_seq) count = diff_between_two_seq(seq, ref_seq) assert count == 1, 'Expected diff of 1 AA, but it was: '+str(count) vcf_file = 'tests/data/singleSeq_vcf_enst_test2.vcf.gz' single_seq_protein = SingleSeqProteinVCFSeqExtractor( gtf_file, fasta_file, vcf_file) seq = list(single_seq_protein.extract_all()) assert len(seq) == 0 @pytest.fixture def single_variant_seq(): vcf_file = 'tests/data/singleVar_vcf_enst_test2.vcf.gz' return SingleVariantProteinVCFSeqExtractor(gtf_file, fasta_file, vcf_file) def diff_between_two_seq(seq1, seq2): count = 0 for i in range(len(seq1)): if seq1[i] != seq2[i]: count += 1 return count def test_SingleVariantProteinVCFSeqExtractor_extract(single_variant_seq, transcript_seq_extractor): transcript_id = 'enst_test2' seqs = list(single_variant_seq.extract(transcript_id)) txt_file = 'tests/data/Output_singleVar_vcf_enst_test2.txt' expected_seq = open(txt_file).read().splitlines() assert seqs[0] == expected_seq[0] assert seqs[1] == expected_seq[1] assert seqs[2] == expected_seq[2] seqs = list(single_variant_seq.extract_all()) counter = 0 for tr_id, t_id_seqs in seqs: t_id_seqs = list(t_id_seqs) counter += len(t_id_seqs) for i, seq in enumerate(t_id_seqs): assert seq == expected_seq[i] assert tr_id == 'enst_test2' assert counter == 3, 'Number of variants in vcf 3, but # of seq was: ' + \ str(counter) transcript_id = ['enst_test2', 'enst_test1'] seqs = single_variant_seq.extract_list(transcript_id) for tr_id, t_id_seqs in seqs: assert tr_id in ['enst_test2', 'enst_test1'], tr_id vcf_file = 'tests/data/singleVar_vcf_enst_test1_diff_type_of_variants.vcf.gz' transcript_id = 'enst_test1' single_var_protein = SingleVariantProteinVCFSeqExtractor( gtf_file, fasta_file, 
vcf_file) seqs = list(single_var_protein.extract(transcript_id)) ref_seq = transcript_seq_extractor.get_protein_seq(transcript_id) assert len(seqs) == 1 for seq in seqs: assert len(seq) == len(ref_seq) count = diff_between_two_seq(seq, ref_seq) assert count == 1, 'Expected diff of 1 AA, but it was: '+str(count) vcf_file = 'tests/data/singleSeq_vcf_enst_test2.vcf.gz' single_var_protein = SingleVariantProteinVCFSeqExtractor( gtf_file, fasta_file, vcf_file) length = 0 seqs = list(single_var_protein.extract_all()) for t_id in seqs: length = len(list(t_id)) assert length == 0 # TODO: add for all proteins.pep.all.fa
0
0
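The test module in the record above defines diff_between_two_seq with an explicit index loop to count position-wise mismatches between two equal-length protein sequences. As a hedged aside, an equivalent sketch using zip is shown below; like the original, it assumes both sequences have the same length (zip would silently truncate to the shorter one otherwise), and it is an illustration, not part of the kipoiseq API.

def diff_between_two_seq(seq1, seq2):
    # Count positions where the two sequences disagree; each True adds 1.
    return sum(a != b for a, b in zip(seq1, seq2))

# Example: diff_between_two_seq('HRID', 'HRIE') returns 1.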
eed5699e06d3cac61b4a945b53a1004046c608f3
1,026
py
Python
task3/task3.py
ksmirenko/ml-homework
a5e558352ffc332ad5e40526dda21f205718a203
[ "MIT" ]
1
2020-08-05T08:06:33.000Z
2020-08-05T08:06:33.000Z
task3/task3.py
ksmirenko/ml-homework
a5e558352ffc332ad5e40526dda21f205718a203
[ "MIT" ]
null
null
null
task3/task3.py
ksmirenko/ml-homework
a5e558352ffc332ad5e40526dda21f205718a203
[ "MIT" ]
null
null
null
from PIL import Image import numpy as np # Works when launched from terminal # noinspection PyUnresolvedReferences from k_means import k_means input_image_file = 'lena.jpg' output_image_prefix = 'out_lena' n_clusters = [2, 3, 5] max_iterations = 100 launch_count = 3 def main(): # Read input image image = np.array(Image.open(input_image_file)) X = image.reshape((image.shape[0] * image.shape[1], image.shape[2])) for k in n_clusters: print(f"{k} clusters") # 'Compress' image using K-means centroids, clustered = k_means(X, k=k, max_iterations=max_iterations, launch_count=launch_count) new_X = np.array([centroids[cluster_index] for cluster_index in clustered]) new_X = new_X.astype(np.uint8) # Write output image new_image = new_X.reshape(image.shape) output_image_name = f"{output_image_prefix}_{k}.jpg" Image.fromarray(new_image).save(output_image_name) print(f"Saved {output_image_name}") print("Done.") if __name__ == '__main__': main()
27.72973
104
0.692008
from PIL import Image import numpy as np # Works when launched from terminal # noinspection PyUnresolvedReferences from k_means import k_means input_image_file = 'lena.jpg' output_image_prefix = 'out_lena' n_clusters = [2, 3, 5] max_iterations = 100 launch_count = 3 def main(): # Read input image image = np.array(Image.open(input_image_file)) X = image.reshape((image.shape[0] * image.shape[1], image.shape[2])) for k in n_clusters: print(f"{k} clusters") # 'Compress' image using K-means centroids, clustered = k_means(X, k=k, max_iterations=max_iterations, launch_count=launch_count) new_X = np.array([centroids[cluster_index] for cluster_index in clustered]) new_X = new_X.astype(np.uint8) # Write output image new_image = new_X.reshape(image.shape) output_image_name = f"{output_image_prefix}_{k}.jpg" Image.fromarray(new_image).save(output_image_name) print(f"Saved {output_image_name}") print("Done.") if __name__ == '__main__': main()
0
0
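The record above imports k_means from a local module that is not included in this dump, calling it as k_means(X, k=..., max_iterations=..., launch_count=...) and expecting a (centroids, clustered) pair, where clustered[i] is the cluster index assigned to pixel X[i]. Below is a minimal NumPy sketch consistent with that call site; it is an assumption about the hidden module, implementing plain Lloyd's algorithm with launch_count random restarts and keeping the run with the lowest within-cluster squared distance.

import numpy as np

def k_means(X, k, max_iterations=100, launch_count=3):
    # Hedged sketch of the unshown local k_means module: returns (centroids,
    # clustered) as expected by task3.py's main(). Not the original code.
    X = X.astype(np.float64)
    best_inertia, best = np.inf, None
    rng = np.random.default_rng()
    for _ in range(launch_count):  # several random restarts, keep the best
        centroids = X[rng.choice(len(X), size=k, replace=False)]
        for _ in range(max_iterations):
            # Assign every point to its nearest centroid
            dists = np.linalg.norm(X[:, None, :] - centroids[None, :, :], axis=2)
            clustered = dists.argmin(axis=1)
            # Recompute centroids; keep the old one if a cluster went empty
            new_centroids = np.array([
                X[clustered == j].mean(axis=0) if np.any(clustered == j) else centroids[j]
                for j in range(k)
            ])
            if np.allclose(new_centroids, centroids):
                break
            centroids = new_centroids
        inertia = ((X - centroids[clustered]) ** 2).sum()
        if inertia < best_inertia:
            best_inertia, best = inertia, (centroids, clustered)
    return best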