# MTES-MCT/sparte — GeoJSON and model serializers (Django REST Framework)
from rest_framework_gis import serializers
from rest_framework import serializers as s

from .models import (
    Artificialisee2015to2018,
    Artificielle2018,
    CommunesSybarval,
    CouvertureSol,
    EnveloppeUrbaine2018,
    Ocsge,
    Renaturee2018to2015,
    Sybarval,
    Voirie2018,
    ZonesBaties2018,
    UsageSol,
)


def get_label(code="", label=""):
    if code is None:
        code = "-"
    if label is None:
        label = "inconnu"
    return f"{code} {label[:30]}"


class Artificialisee2015to2018Serializer(serializers.GeoFeatureModelSerializer):
    usage_2015 = s.SerializerMethodField()
    usage_2018 = s.SerializerMethodField()
    couverture_2015 = s.SerializerMethodField()
    couverture_2018 = s.SerializerMethodField()

    def get_usage_2015(self, obj):
        return get_label(code=obj.us_2015, label=obj.us_2015_label)

    def get_usage_2018(self, obj):
        return get_label(code=obj.us_2018, label=obj.us_2018_label)

    def get_couverture_2015(self, obj):
        return get_label(code=obj.cs_2015, label=obj.cs_2015_label)

    def get_couverture_2018(self, obj):
        return get_label(code=obj.cs_2018, label=obj.cs_2018_label)

    class Meta:
        fields = ("id", "surface", "usage_2015", "usage_2018", "couverture_2015", "couverture_2018")
        geo_field = "mpoly"
        model = Artificialisee2015to2018


class Artificielle2018Serializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    class Meta:
        fields = ("id", "surface", "couverture")
        geo_field = "mpoly"
        model = Artificielle2018


class CommunesSybarvalSerializer(serializers.GeoFeatureModelSerializer):
    """Marker GeoJSON serializer."""

    class Meta:
        """Marker serializer meta class."""

        fields = ("nom", "code_insee", "surface")
        geo_field = "mpoly"
        model = CommunesSybarval


class EnveloppeUrbaine2018Serializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    class Meta:
        fields = ("id", "couverture", "surface")
        geo_field = "mpoly"
        model = EnveloppeUrbaine2018


class OcsgeSerializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()
    usage = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    def get_usage(self, obj):
        return get_label(code=obj.usage, label=obj.usage_label)

    class Meta:
        fields = ("id", "couverture", "usage", "millesime", "map_color", "year")
        geo_field = "mpoly"
        model = Ocsge


class Renaturee2018to2015Serializer(serializers.GeoFeatureModelSerializer):
    usage_2015 = s.SerializerMethodField()
    usage_2018 = s.SerializerMethodField()
    couverture_2015 = s.SerializerMethodField()
    couverture_2018 = s.SerializerMethodField()

    def get_usage_2015(self, obj):
        return get_label(code=obj.us_2015, label=obj.us_2015_label)

    def get_usage_2018(self, obj):
        return get_label(code=obj.us_2018, label=obj.us_2018_label)

    def get_couverture_2015(self, obj):
        return get_label(code=obj.cs_2015, label=obj.cs_2015_label)

    def get_couverture_2018(self, obj):
        return get_label(code=obj.cs_2018, label=obj.cs_2018_label)

    class Meta:
        fields = ("id", "surface", "usage_2015", "usage_2018", "couverture_2015", "couverture_2018")
        geo_field = "mpoly"
        model = Renaturee2018to2015


class SybarvalSerializer(serializers.GeoFeatureModelSerializer):
    class Meta:
        fields = ("id", "surface")
        geo_field = "mpoly"
        model = Sybarval


class Voirie2018Serializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()
    usage = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    def get_usage(self, obj):
        return get_label(code=obj.usage, label=obj.usage_label)

    class Meta:
        fields = ("id", "surface", "couverture", "usage")
        geo_field = "mpoly"
        model = Voirie2018


class ZonesBaties2018Serializer(serializers.GeoFeatureModelSerializer):
    couverture = s.SerializerMethodField()
    usage = s.SerializerMethodField()

    def get_couverture(self, obj):
        return get_label(code=obj.couverture, label=obj.couverture_label)

    def get_usage(self, obj):
        return get_label(code=obj.usage, label=obj.usage_label)

    class Meta:
        fields = ("id", "couverture", "usage", "surface")
        geo_field = "mpoly"
        model = ZonesBaties2018


class CouvertureSolSerializer(serializers.ModelSerializer):
    class Meta:
        fields = ("id", "parent", "code", "label", "is_artificial")
        model = CouvertureSol


class UsageSolSerializer(serializers.ModelSerializer):
    class Meta:
        fields = ("id", "parent", "code", "label")
        model = UsageSol
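# --- Illustrative usage (not part of the original module) -------------------
# A minimal sketch, assuming the serializers above live in a Django app with
# URL routing: a read-only viewset that serves Ocsge rows as a GeoJSON
# FeatureCollection. The viewset and the "ocsge" route name are hypothetical.
from rest_framework import routers, viewsets


class OcsgeViewSet(viewsets.ReadOnlyModelViewSet):
    # OcsgeSerializer uses geo_field = "mpoly", so list/detail responses are GeoJSON.
    queryset = Ocsge.objects.all()
    serializer_class = OcsgeSerializer


router = routers.DefaultRouter()
router.register(r"ocsge", OcsgeViewSet)  # GET /ocsge/ -> GeoJSON FeatureCollection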
# Django admin registration for the SearchResult model
from django.contrib import admin

from .models import SearchResult

# Register your models here.


class SearchResultAdmin(admin.ModelAdmin):
    fields = ["query", "heading", "url", "text"]


admin.site.register(SearchResult, SearchResultAdmin)
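# --- Equivalent registration (illustrative, not part of the original file) --
# Django also accepts the decorator form; this is functionally identical to
# the admin.site.register(...) call above and shown only for comparison.
from django.contrib import admin

from .models import SearchResult


@admin.register(SearchResult)
class SearchResultAdmin(admin.ModelAdmin):
    fields = ["query", "heading", "url", "text"]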
# Rasa model-training entry points (a fork carrying "bf mod" Botfront changes)
import asyncio
import os
import tempfile
from contextlib import ExitStack
from typing import Text, Optional, List, Union, Dict

from rasa.importers.importer import TrainingDataImporter
from rasa import model
from rasa.model import FingerprintComparisonResult
from rasa.core.domain import Domain
from rasa.utils.common import TempDirectoryPath
from rasa.cli.utils import (
    print_success,
    print_warning,
    print_error,
    bcolors,
    print_color,
)
from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME


def train(
    domain: Text,
    config: Text,
    training_files: Union[Text, List[Text]],
    output: Text = DEFAULT_MODELS_PATH,
    force_training: bool = False,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
    additional_arguments: Optional[Dict] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Optional[Text]:
    if loop is None:
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)

    return loop.run_until_complete(
        train_async(
            domain=domain,
            config=config,
            training_files=training_files,
            output_path=output,
            force_training=force_training,
            fixed_model_name=fixed_model_name,
            persist_nlu_training_data=persist_nlu_training_data,
            additional_arguments=additional_arguments,
        )
    )


async def train_async(
    domain: Union[Domain, Text],
    config: Dict[Text, Text],
    training_files: Optional[Union[Text, List[Text]]],
    output_path: Text = DEFAULT_MODELS_PATH,
    force_training: bool = False,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
    additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
    """Trains a Rasa model (Core and NLU).

    Args:
        domain: Path to the domain file.
        config: Dict of paths to the config for Core and NLU. Keys are language codes
        training_files: Paths to the training data for Core and NLU.
        output_path: Output path.
        force_training: If `True` retrain model even if data has not changed.
        fixed_model_name: Name of model to be stored.
        persist_nlu_training_data: `True` if the NLU training data should be persisted
            with the model.
        additional_arguments: Additional training parameters.

    Returns:
        Path of the trained model archive.
    """
    # file_importer = TrainingDataImporter.load_from_config(
    #     config, domain, training_files
    # )
    with ExitStack() as stack:
        train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))

        # bf mod
        from rasa_addons.importers import BotfrontFileImporter

        file_importer = BotfrontFileImporter(config, domain, training_files)
        # domain = await file_importer.get_domain()
        # if domain.is_empty():
        #     return await handle_domain_if_not_exists(
        #         file_importer, output_path, fixed_model_name
        #     )
        # /bf mod

        return await _train_async_internal(
            file_importer,
            train_path,
            output_path,
            force_training,
            fixed_model_name,
            persist_nlu_training_data,
            additional_arguments,
        )


async def handle_domain_if_not_exists(
    file_importer: TrainingDataImporter, output_path, fixed_model_name
):
    nlu_model_only = await _train_nlu_with_validated_data(
        file_importer, output=output_path, fixed_model_name=fixed_model_name
    )
    print_warning(
        "Core training was skipped because no valid domain file was found. "
        "Only an nlu-model was created."
        "Please specify a valid domain using '--domain' argument or check "
        "if the provided domain file exists."
    )
    return nlu_model_only


async def _train_async_internal(
    file_importer: TrainingDataImporter,
    train_path: Text,
    output_path: Text,
    force_training: bool,
    fixed_model_name: Optional[Text],
    persist_nlu_training_data: bool,
    additional_arguments: Optional[Dict],
) -> Optional[Text]:
    """Trains a Rasa model (Core and NLU). Use only from `train_async`.

    Args:
        file_importer: `TrainingDataImporter` which supplies the training data.
        train_path: Directory in which to train the model.
        output_path: Output path.
        force_training: If `True` retrain model even if data has not changed.
        persist_nlu_training_data: `True` if the NLU training data should be persisted
            with the model.
        fixed_model_name: Name of model to be stored.
        additional_arguments: Additional training parameters.

    Returns:
        Path of the trained model archive.
    """
    stories, nlu_data = await asyncio.gather(
        file_importer.get_stories(), file_importer.get_nlu_data()
    )

    # if stories.is_empty() and nlu_data.is_empty():
    #     print_error(
    #         "No training data given. Please provide stories and NLU data in "
    #         "order to train a Rasa model using the '--data' argument."
    #     )
    #     return

    # if nlu_data.is_empty():
    #     print_warning("No NLU data present. Just a Rasa Core model will be trained.")
    #     return await _train_core_with_validated_data(
    #         file_importer,
    #         output=output_path,
    #         fixed_model_name=fixed_model_name,
    #         additional_arguments=additional_arguments,
    #     )

    new_fingerprint = await model.model_fingerprint(file_importer)
    old_model = model.get_latest_model(output_path)
    fingerprint_comparison = FingerprintComparisonResult(force_training=force_training)
    if not force_training:
        fingerprint_comparison = model.should_retrain(
            new_fingerprint, old_model, train_path
        )

    # bf mod >
    if fingerprint_comparison.nlu == True:
        # retrain NLU for every language present in the (per-language) NLU data
        fingerprint_comparison.nlu = list(nlu_data.keys())
    domain = await file_importer.get_domain()
    core_untrainable = domain.is_empty() or stories.is_empty()
    nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()]
    fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable
    fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu if l not in nlu_untrainable]

    if core_untrainable:
        print_color("Skipping Core training since domain or stories are empty.", color=bcolors.OKBLUE)

    for lang in nlu_untrainable:
        print_color(
            "No NLU data found for language <{}>, skipping training...".format(lang),
            color=bcolors.OKBLUE,
        )
    # </ bf mod

    if fingerprint_comparison.is_training_required():
        await _do_training(
            file_importer,
            output_path=output_path,
            train_path=train_path,
            fingerprint_comparison_result=fingerprint_comparison,
            fixed_model_name=fixed_model_name,
            persist_nlu_training_data=persist_nlu_training_data,
            additional_arguments=additional_arguments,
        )

        return model.package_model(
            fingerprint=new_fingerprint,
            output_directory=output_path,
            train_path=train_path,
            fixed_model_name=fixed_model_name,
        )

    print_success(
        "Nothing changed. You can use the old model stored at '{}'."
        "".format(os.path.abspath(old_model))
    )

    return old_model


async def _do_training(
    file_importer: TrainingDataImporter,
    output_path: Text,
    train_path: Text,
    fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
    additional_arguments: Optional[Dict] = None,
):
    if not fingerprint_comparison_result:
        fingerprint_comparison_result = FingerprintComparisonResult()

    if fingerprint_comparison_result.should_retrain_core():
        await _train_core_with_validated_data(
            file_importer,
            output=output_path,
            train_path=train_path,
            fixed_model_name=fixed_model_name,
            additional_arguments=additional_arguments,
        )
    elif fingerprint_comparison_result.should_retrain_nlg():
        print_color(
            "Core stories/configuration did not change. "
            "Only the templates section has been changed. A new model with "
            "the updated templates will be created.",
            color=bcolors.OKBLUE,
        )
        await model.update_model_with_new_domain(file_importer, train_path)
    else:
        print_color(
            "Core stories/configuration did not change. No need to retrain Core model.",
            color=bcolors.OKBLUE,
        )

    if fingerprint_comparison_result.should_retrain_nlu():
        await _train_nlu_with_validated_data(
            file_importer,
            output=output_path,
            train_path=train_path,
            fixed_model_name=fixed_model_name,
            retrain_nlu=fingerprint_comparison_result.nlu,
            persist_nlu_training_data=persist_nlu_training_data,
        )
    else:
        print_color(
            "NLU data/configuration did not change. No need to retrain NLU model.",
            color=bcolors.OKBLUE,
        )


def train_core(
    domain: Union[Domain, Text],
    config: Text,
    stories: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(
        train_core_async(
            domain=domain,
            config=config,
            stories=stories,
            output=output,
            train_path=train_path,
            fixed_model_name=fixed_model_name,
            additional_arguments=additional_arguments,
        )
    )


async def train_core_async(
    domain: Union[Domain, Text],
    config: Text,
    stories: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
    """Trains a Core model.

    Args:
        domain: Path to the domain file.
        config: Path to the config file for Core.
        stories: Path to the Core training data.
        output: Output path.
        train_path: If `None` the model will be trained in a temporary
            directory, otherwise in the provided directory.
        fixed_model_name: Name of model to be stored.
        additional_arguments: Additional training parameters.

    Returns:
        If `train_path` is given it returns the path to the model archive,
        otherwise the path to the directory with the trained model files.

    """
    file_importer = TrainingDataImporter.load_core_importer_from_config(
        config, domain, [stories]
    )
    domain = await file_importer.get_domain()
    if domain.is_empty():
        print_error(
            "Core training was skipped because no valid domain file was found. "
            "Please specify a valid domain using '--domain' argument or check "
            "if the provided domain file exists."
        )
        return None

    if not await file_importer.get_stories():
        print_error(
            "No stories given. Please provide stories in order to "
            "train a Rasa Core model using the '--stories' argument."
        )
        return

    return await _train_core_with_validated_data(
        file_importer,
        output=output,
        train_path=train_path,
        fixed_model_name=fixed_model_name,
        additional_arguments=additional_arguments,
    )


async def _train_core_with_validated_data(
    file_importer: TrainingDataImporter,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
    """Train Core with validated training and config data."""

    import rasa.core.train

    with ExitStack() as stack:
        if train_path:
            # If the train path was provided, do nothing on exit.
            _train_path = train_path
        else:
            # Otherwise, create a temp train path and clean it up on exit.
            _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))

        # normal (not compare) training
        print_color("Training Core model...", color=bcolors.OKBLUE)
        domain, config = await asyncio.gather(
            file_importer.get_domain(), file_importer.get_config()
        )
        await rasa.core.train(
            domain_file=domain,
            training_resource=file_importer,
            output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME),
            policy_config=config,
            additional_arguments=additional_arguments,
        )
        print_color("Core model training completed.", color=bcolors.OKBLUE)

        if train_path is None:
            # Only Core was trained.
            new_fingerprint = await model.model_fingerprint(file_importer)
            return model.package_model(
                fingerprint=new_fingerprint,
                output_directory=output,
                train_path=_train_path,
                fixed_model_name=fixed_model_name,
                model_prefix="core-",
            )

        return _train_path


def train_nlu(
    config: Text,
    nlu_data: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
) -> Optional[Text]:
    """Trains an NLU model.

    Args:
        config: Path to the config file for NLU.
        nlu_data: Path to the NLU training data.
        output: Output path.
        train_path: If `None` the model will be trained in a temporary
            directory, otherwise in the provided directory.
        fixed_model_name: Name of the model to be stored.
        persist_nlu_training_data: `True` if the NLU training data should be persisted
            with the model.

    Returns:
        If `train_path` is given it returns the path to the model archive,
        otherwise the path to the directory with the trained model files.

    """
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(
        _train_nlu_async(
            config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data
        )
    )


async def _train_nlu_async(
    config: Text,
    nlu_data: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
):
    if not nlu_data:
        print_error(
            "No NLU data given. Please provide NLU data in order to train "
            "a Rasa NLU model using the '--nlu' argument."
        )
        return

    # training NLU only hence the training files still have to be selected
    file_importer = TrainingDataImporter.load_nlu_importer_from_config(
        config, training_data_paths=[nlu_data]
    )

    training_datas = await file_importer.get_nlu_data()
    if training_datas.is_empty():
        print_error(
            "Path '{}' doesn't contain valid NLU data in it. "
            "Please verify the data format. "
            "The NLU model training will be skipped now.".format(nlu_data)
        )
        return

    return await _train_nlu_with_validated_data(
        file_importer,
        output=output,
        train_path=train_path,
        fixed_model_name=fixed_model_name,
        persist_nlu_training_data=persist_nlu_training_data,
    )


async def _train_nlu_with_validated_data(
    file_importer: TrainingDataImporter,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
    retrain_nlu: Union[bool, List[Text]] = True,
) -> Optional[Text]:
    """Train NLU with validated training and config data."""

    import rasa.nlu.train

    with ExitStack() as stack:
        models = {}
        if train_path:
            # If the train path was provided, do nothing on exit.
            _train_path = train_path
        else:
            # Otherwise, create a temp train path and clean it up on exit.
            _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))

        # bf mod
        config = await file_importer.get_nlu_config(retrain_nlu)
        for lang in config:
            if config[lang]:
                print_color("Training {} NLU model ...".format(lang), color=bcolors.OKBLUE)
                _, models[lang], _ = await rasa.nlu.train(
                    config[lang],
                    file_importer,
                    _train_path,
                    fixed_model_name="nlu-{}".format(lang),
                    persist_nlu_training_data=persist_nlu_training_data,
                )
            else:
                print_color(
                    "NLU data for language <{}> didn't change, skipping training...".format(lang),
                    color=bcolors.OKBLUE,
                )
        # /bf mod

        print_color("NLU model training completed.", color=bcolors.OKBLUE)

        if train_path is None:
            # Only NLU was trained
            new_fingerprint = await model.model_fingerprint(file_importer)
            return model.package_model(
                fingerprint=new_fingerprint,
                output_directory=output,
                train_path=_train_path,
                fixed_model_name=fixed_model_name,
                model_prefix="nlu-",
            )

        return _train_path
Args: config: Path to", "output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data:", "# ) with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from", "train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain, Text], config: Text,", "Rasa Core model using the '--stories' argument.\" ) return return await _train_core_with_validated_data( file_importer,", "config: if config[lang]: print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _", "output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain, Text],", "print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train(", "rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning,", "to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data()", "stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain, Text],", "stories.is_empty() nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core =", "file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text,", "was provided, do nothing on exit. _train_path = train_path else: # Otherwise, create", "the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training(", "_ = await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data", "# /bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path is None: #", "with ExitStack() as stack: models = {} from rasa.nlu import config as cfg_loader", "Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None,", "await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable = [l for l, d", "in nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable fingerprint_comparison.nlu = [l", "print_warning(\"No NLU data present. Just a Rasa Core model will be trained.\") #", "): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path,", "valid domain file was found. 
\" \"Please specify a valid domain using '--domain'", "\"\"\" # file_importer = TrainingDataImporter.load_from_config( # config, domain, training_files # ) with ExitStack()", "completed.\", color=bcolors.OKBLUE) if train_path is None: # Only NLU was trained new_fingerprint =", "fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train Core", "do nothing on exit. _train_path = train_path else: # Otherwise, create a temp", "train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config,", "output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path:", "output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You can use the old model", "change. \" \"Only the templates section has been changed. A new model with", "\"\"\"Trains a Rasa model (Core and NLU). Args: domain: Path to the domain", "fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data for language <{}> didn't change, skipping training...\".format(lang),", "up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core", "import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files: Union[Text, List[Text]], output:", "path to the model archive, otherwise the path to the directory with the", ") async def train_core_async( domain: Union[Domain, Text], config: Text, stories: Text, output: Text,", "training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain", ") training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain valid", "train_core( domain: Union[Domain, Text], config: Text, stories: Text, output: Text, train_path: Optional[Text] =", "domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if", "(Core and NLU). Args: domain: Path to the domain file. config: Dict of", "Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core", "= None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if loop is None:", "Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict]", "files still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas", "to the training data for Core and NLU. output_path: Output path. force_training: If", "otherwise the path to the directory with the trained model files. 
\"\"\" loop", "None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories,", "async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path: Text, force_training: bool, fixed_model_name: Optional[Text],", "the directory with the trained model files. \"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain,", "return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu( config:", "no valid domain file was found. \" \"Please specify a valid domain using", "= False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] =", "`TrainingDataImporter` which supplies the training data. train_path: Directory in which to train the", "training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain,", "Core and NLU. output_path: Output path. force_training: If `True` retrain model even if", "data given. Please provide NLU data in order to train \" \"a Rasa", "from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training data. train_path: Directory in", "model from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath", "the domain file. config: Dict of paths to the config for Core and", "the '--nlu' argument.\" ) return # training NLU only hence the training files", "force_training: bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments:", "Please provide NLU data in order to train \" \"a Rasa NLU model", "is None: try: loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return", "to retrain Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path,", "new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"nlu-\", ) return", "rasa import model from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common", "\" # \"order to train a Rasa model using the '--data' argument.\" #", "fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path ) # bf mod > if fingerprint_comparison.nlu", "= None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config,", "the model. additional_arguments: Additional training parameters. Returns: Path of the trained model archive.", "files. \"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data, output, train_path, fixed_model_name,", "the NLU training data should be persisted with the model. 
additional_arguments: Additional training", "# training NLU only hence the training files still have to be selected", "model with \" \"the updated templates will be created.\", color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer,", "if domain.is_empty(): print_error( \"Core training was skipped because no valid domain file was", "loop.run_until_complete( _train_nlu_async( config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, ) ) async def _train_nlu_async(", "None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ) -> Optional[Text]: \"\"\"Trains", "Args: domain: Path to the domain file. config: Dict of paths to the", "models[lang], _ = await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU", "model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer: TrainingDataImporter,", "using the '--data' argument.\" # ) # return # if nlu_data.is_empty(): # print_warning(\"No", "DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files: Union[Text, List[Text]], output: Text", "\"No stories given. Please provide stories in order to \" \"train a Rasa", "model. additional_arguments: Additional training parameters. Returns: Path of the trained model archive. \"\"\"", "from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files: Union[Text,", "return _train_path def train_nlu( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] =", "fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing", "still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas =", "\"\"\"Trains an NLU model. Args: config: Path to the config file for NLU.", "training data. train_path: Directory in which to train the model. output_path: Output path.", "l in fingerprint_comparison.nlu if l not in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training", "bool = False, additional_arguments: Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result =", "await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data for language", "'{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer: TrainingDataImporter, output_path: Text, train_path:", "persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed.", "print_error( f\"Path '{nlu_data}' doesn't contain valid NLU data in it. \" \"Please verify", "Additional training parameters. 
Returns: If `train_path` is given it returns the path to", "<{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training completed.\",", "an NLU model. Args: config: Path to the config file for NLU. nlu_data:", "def train( domain: Text, config: Text, training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH,", "def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] =", "training will be skipped now.\" ) return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path,", "None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: loop", "if stories.is_empty() and nlu_data.is_empty(): # print_error( # \"No training data given. Please provide", ") from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files:", "from rasa import model from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain from", "Path to the config file for NLU. nlu_data: Path to the NLU training", "f\"Path '{nlu_data}' doesn't contain valid NLU data in it. \" \"Please verify the", "train_path: # If the train path was provided, do nothing on exit. _train_path", "stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files)", "data has not changed. fixed_model_name: Name of model to be stored. persist_nlu_training_data: `True`", "async def _train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None,", "output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer) old_model =", "import FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import", "path and clean it up on exit. 
_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod", "List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None,", ") ) async def _train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text]", "provide stories in order to \" \"train a Rasa Core model using the", "with validated training and config data.\"\"\" import rasa.core.train with ExitStack() as stack: if", "training and config data.\"\"\" import rasa.nlu.train with ExitStack() as stack: models = {}", "# Otherwise, create a temp train path and clean it up on exit.", "additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None,", "BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) # domain = await file_importer.get_domain() # if", "bf mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) # domain", "= TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path", "= await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain valid NLU data", "clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config =", "False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if", "TrainingDataImporter, output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None,", "if train_path is None: # Only NLU was trained new_fingerprint = await model.model_fingerprint(file_importer)", "training files still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] )", "<{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer,", "TrainingDataImporter.load_from_config( # config, domain, training_files # ) with ExitStack() as stack: train_path =", "not change. \" \"Only the templates section has been changed. 
A new model", "None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, retrain_nlu: Union[bool, List[Text]] =", "= None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]:", "= stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain, config", "A new model with \" \"the updated templates will be created.\", color=bcolors.OKBLUE, )", "and NLU data in \" # \"order to train a Rasa model using", "if l not in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since domain or", "training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text]", "nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since domain or stories are empty.\", color=bcolors.OKBLUE)", "additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training)", "Name of the model to be stored. persist_nlu_training_data: `True` if the NLU training", "config, domain, training_files # ) with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) #", "model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core stories/configuration did not change. No need to retrain", "found. \" \"Please specify a valid domain using '--domain' argument or check if", "l not in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since domain or stories", "fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core", "'{nlu_data}' doesn't contain valid NLU data in it. \" \"Please verify the data", "await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def", "None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if loop is None: try:", "training and config data.\"\"\" import rasa.core.train with ExitStack() as stack: if train_path: #", "NLU training data should be persisted with the model. fixed_model_name: Name of model", "training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path,", "NLU). Args: domain: Path to the domain file. config: Dict of paths to", "provide stories and NLU data in \" # \"order to train a Rasa", "empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No NLU data found for language <{}>,", "the config file for Core. stories: Path to the Core training data. output:", "\"No NLU data given. Please provide NLU data in order to train \"", "a Rasa model (Core and NLU). Use only from `train_async`. 
Args: file_importer: `TrainingDataImporter`", "additional_arguments: Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core():", "_train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did", "file for NLU. nlu_data: Path to the NLU training data. output: Output path.", "mod config = await file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]: print_color(\"Start training", "bf mod config = await file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]: print_color(\"Start", "in \" # \"order to train a Rasa model using the '--data' argument.\"", "[stories] ) domain = await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was skipped", "# Only NLU was trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output,", ") async def _train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] =", "Core training data. output: Output path. train_path: If `None` the model will be", "hence the training files still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config,", "data in \" # \"order to train a Rasa model using the '--data'", "validated training and config data.\"\"\" import rasa.core.train with ExitStack() as stack: if train_path:", "color=bcolors.OKBLUE) if train_path is None: # Only Core was trained. new_fingerprint = await", "order to \" \"train a Rasa Core model using the '--stories' argument.\" )", "= False, additional_arguments: Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult()", "for lang in nlu_untrainable: print_color(\"No NLU data found for language <{}>, skipping training...\".format(lang),", "print_error, bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text,", "the model will not be compressed. additional_arguments: Additional training parameters. Returns: If `train_path`", "file exists.\" ) return None if not await file_importer.get_stories(): print_error( \"No stories given.", "asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): # print_error( # \"No", "did not change. \" \"Only the templates section has been changed. A new", "training since domain or stories are empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No", "Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) ->", "stories and NLU data in \" # \"order to train a Rasa model", "directory, otherwise in the provided directory. fixed_model_name: Name of model to be stored.", "Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ): if", "You can use the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model", "Keys are language codes training_files: Paths to the training data for Core and", "stored. additional_arguments: Additional training parameters. 
Returns: Path of the trained model archive. \"\"\"", "asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, )", "return old_model async def _do_training( file_importer: TrainingDataImporter, output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult]", "a temp train path and clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))", "model even if data has not changed. persist_nlu_training_data: `True` if the NLU training", "config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain:", ") return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def", "data/configuration did not change. No need to retrain NLU model.\", color=bcolors.OKBLUE, ) def", "= None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ): if not", "lang in nlu_untrainable: print_color(\"No NLU data found for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE)", "domain using '--domain' argument or check if the provided domain file exists.\" )", "language codes training_files: Paths to the training data for Core and NLU. output_path:", "has not changed. persist_nlu_training_data: `True` if the NLU training data should be persisted", "should be persisted with the model. fixed_model_name: Name of model to be stored.", "file_importer: `TrainingDataImporter` which supplies the training data. train_path: Directory in which to train", ") with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers", "List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None,", "stories are empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No NLU data found for", "NLU model.\", color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain, Text], config: Text, stories: Text,", "an nlu-model was created.\" \"Please specify a valid domain using '--domain' argument or", "NLU only hence the training files still have to be selected file_importer =", "stories/configuration did not change. No need to retrain Core model.\", color=bcolors.OKBLUE, ) if", ") async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name:", "train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did not change. 
No", "Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] =", "rasa.nlu import config as cfg_loader if train_path: # If the train path was", "# file_importer = TrainingDataImporter.load_from_config( # config, domain, training_files # ) with ExitStack() as", "fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain, Text], config: Dict[Text,", "format. \" \"The NLU model training will be skipped now.\" ) return return", "file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer: TrainingDataImporter,", "additional_arguments: Additional training parameters. Returns: If `train_path` is given it returns the path", "with list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain()", "if the NLU training data should be persisted with the model. additional_arguments: Additional", "be stored. persist_nlu_training_data: `True` if the NLU training data should be persisted with", "for Core. stories: Path to the Core training data. output: Output path. train_path:", "stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import BotfrontFileImporter file_importer =", "if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return", "\"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data,", "# config, domain, training_files # ) with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))", "the data format. \" \"The NLU model training will be skipped now.\" )", "model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang),", "= model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model,", "from rasa.cli.utils import ( print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants import", "Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training: bool = False,", "directory. fixed_model_name: Name of model to be stored. 
uncompress: If `True` the model", "check if the provided domain file exists.\" ) return nlu_model_only async def _train_async_internal(", "domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable = [l for", "def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path: Text, force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data:", "domain file exists.\" ) return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text,", "If `True` retrain model even if data has not changed. persist_nlu_training_data: `True` if", "compressed. additional_arguments: Additional training parameters. Returns: If `train_path` is given it returns the", "rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE)", "as stack: models = {} from rasa.nlu import config as cfg_loader if train_path:", "Union[bool, List[Text]] = True ) -> Optional[Text]: \"\"\"Train NLU with validated training and", "training parameters. Returns: Path of the trained model archive. \"\"\" stories, nlu_data =", "= False, ): if not nlu_data: print_error( \"No NLU data given. Please provide", "\"Core training was skipped because no valid domain file was found. \" \"Please", "domain.is_empty(): print_error( \"Core training was skipped because no valid domain file was found.", "the '--stories' argument.\" ) return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments,", "file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core", "core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable = [l for l, d in nlu_data.items()", "import rasa.core.train with ExitStack() as stack: if train_path: # If the train path", "bool = False, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Rasa", "return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data(", "None, ) -> Optional[Text]: \"\"\"Train Core with validated training and config data.\"\"\" import", "Please provide stories and NLU data in \" # \"order to train a", "= asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data,", "TrainingDataImporter from rasa import model from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain", "training_files) # domain = await file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists(", "model to be stored. 
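# Usage sketch: a minimal example of how the Core-only entry point `train_core`
# above might be invoked, assuming this module is importable as `rasa.train`.
# The "domain.yml", "config.yml", "data/stories.md" and "models/" paths are
# placeholders, not values taken from this module.
#
#     from rasa.train import train_core
#
#     core_model_path = train_core(
#         domain="domain.yml",
#         config="config.yml",
#         stories="data/stories.md",
#         output="models/",
#     )
#     # Returns the path of the packaged Core model archive, or `None` when the
#     # domain is empty or no stories are given.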
def train_nlu(
    config: Text,
    nlu_data: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
) -> Optional[Text]:
    """Trains an NLU model.

    Args:
        config: Path to the config file for NLU.
        nlu_data: Path to the NLU training data.
        output: Output path.
        train_path: If `None` the model will be trained in a temporary directory,
            otherwise in the provided directory.
        fixed_model_name: Name of the model to be stored.
        persist_nlu_training_data: `True` if the NLU training data should be persisted
            with the model.

    Returns:
        If `train_path` is given it returns the path to the model archive,
        otherwise the path to the directory with the trained model files.
    """
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(
        _train_nlu_async(
            config,
            nlu_data,
            output,
            train_path,
            fixed_model_name,
            persist_nlu_training_data,
        )
    )


async def _train_nlu_async(
    config: Text,
    nlu_data: Text,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
):
    if not nlu_data:
        print_error(
            "No NLU data given. Please provide NLU data in order to train "
            "a Rasa NLU model using the '--nlu' argument."
        )
        return

    # training NLU only hence the training files still have to be selected
    file_importer = TrainingDataImporter.load_nlu_importer_from_config(
        config, training_data_paths=[nlu_data]
    )

    training_datas = await file_importer.get_nlu_data()
    if training_datas.is_empty():
        print_error(
            f"Path '{nlu_data}' doesn't contain valid NLU data in it. "
            "Please verify the data format. "
            "The NLU model training will be skipped now."
        )
        return

    return await _train_nlu_with_validated_data(
        file_importer,
        output=output,
        train_path=train_path,
        fixed_model_name=fixed_model_name,
        persist_nlu_training_data=persist_nlu_training_data,
    )


async def _train_nlu_with_validated_data(
    file_importer: TrainingDataImporter,
    output: Text,
    train_path: Optional[Text] = None,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
    retrain_nlu: Union[bool, List[Text]] = True,
) -> Optional[Text]:
    """Train NLU with validated training and config data."""

    import rasa.nlu.train

    with ExitStack() as stack:
        models = {}
        from rasa.nlu import config as cfg_loader

        if train_path:
            # If the train path was provided, do nothing on exit.
            _train_path = train_path
        else:
            # Otherwise, create a temp train path and clean it up on exit.
            _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))

        # bf mod
        config = await file_importer.get_nlu_config(retrain_nlu)
        for lang in config:
            if config[lang]:
                print_color(
                    "Start training {} NLU model ...".format(lang), color=bcolors.OKBLUE
                )
                _, models[lang], _ = await rasa.nlu.train(
                    config[lang],
                    file_importer,
                    _train_path,
                    fixed_model_name="nlu-{}".format(lang),
                    persist_nlu_training_data=persist_nlu_training_data,
                )
            else:
                print_color(
                    "NLU data for language <{}> didn't change, skipping training...".format(
                        lang
                    ),
                    color=bcolors.OKBLUE,
                )
        # /bf mod

        print_color("NLU model training completed.", color=bcolors.OKBLUE)

        if train_path is None:
            # Only NLU was trained
            new_fingerprint = await model.model_fingerprint(file_importer)
            return model.package_model(
                fingerprint=new_fingerprint,
                output_directory=output,
                train_path=_train_path,
                fixed_model_name=fixed_model_name,
                model_prefix="nlu-",
            )

        return _train_path
No need to retrain NLU model.\", color=bcolors.OKBLUE, ) def train_core(", "file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path,", "retrain model even if data has not changed. persist_nlu_training_data: `True` if the NLU", "for NLU. nlu_data: Path to the NLU training data. output: Output path. train_path:", "was skipped because no valid domain file was found. Only an nlu-model was", "If `True` the model will not be compressed. additional_arguments: Additional training parameters. Returns:", "rasa.utils.common import TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning, print_error, bcolors, print_color, )", "= await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data for", ") return # training NLU only hence the training files still have to", "selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty():", "fixed_model_name=fixed_model_name ) print_warning( \"Core training was skipped because no valid domain file was", "Optional[Dict] = None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain,", "from contextlib import ExitStack from typing import Text, Optional, List, Union, Dict from", "persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core and", "_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain,", "for Core and NLU. Keys are language codes training_files: Paths to the training", "Optional[Text]: \"\"\"Trains an NLU model. Args: config: Path to the config file for", "fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, )", "stories in order to \" \"train a Rasa Core model using the '--stories'", "Optional[Text] = None, persist_nlu_training_data: bool = False, retrain_nlu: Union[bool, List[Text]] = True )", "\"No training data given. Please provide stories and NLU data in \" #", "{} from rasa.nlu import config as cfg_loader if train_path: # If the train", "can use the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async", "was trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\",", "change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if", "argument.\" # ) # return # if nlu_data.is_empty(): # print_warning(\"No NLU data present.", "== True: # replace True with list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\",", "\"NLU data/configuration did not change. 
No need to retrain NLU model.\", color=bcolors.OKBLUE, )", "if loop is None: try: loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop()", "asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files,", "be persisted with the model. additional_arguments: Additional training parameters. Returns: Path of the", "language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await _do_training(", "ExitStack() as stack: models = {} from rasa.nlu import config as cfg_loader if", "file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name )", "from typing import Text, Optional, List, Union, Dict from rasa.importers.importer import TrainingDataImporter from", "model will not be compressed. additional_arguments: Additional training parameters. Returns: If `train_path` is", "config data.\"\"\" import rasa.nlu.train with ExitStack() as stack: models = {} from rasa.nlu", "the provided directory. fixed_model_name: Name of the model to be stored. persist_nlu_training_data: `True`", "present. Just a Rasa Core model will be trained.\") # return await _train_core_with_validated_data(", "given it returns the path to the model archive, otherwise the path to", "{}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable = [l", "Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False,", "model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data,", "= domain.is_empty() or stories.is_empty() nlu_untrainable = [l for l, d in nlu_data.items() if", "model using the '--nlu' argument.\" ) return # training NLU only hence the", "with validated training and config data.\"\"\" import rasa.nlu.train with ExitStack() as stack: models", "\"\"\"Train Core with validated training and config data.\"\"\" import rasa.core.train with ExitStack() as", "\"Nothing changed. You can use the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) )", "domain = await file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer,", "Returns: If `train_path` is given it returns the path to the model archive,", "if the NLU training data should be persisted with the model. fixed_model_name: Name", ") -> Optional[Text]: if loop is None: try: loop = asyncio.get_event_loop() except RuntimeError:", "`train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training data. train_path: Directory in which", "be trained.\") # return await _train_core_with_validated_data( # file_importer, # output=output_path, # fixed_model_name=fixed_model_name, #", ") -> Optional[Text]: \"\"\"Trains an NLU model. 
Args: config: Path to the config", "bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config:", "and nlu_data.is_empty(): # print_error( # \"No training data given. Please provide stories and", "True with list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await", "output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] =", "bool = False, ) -> Optional[Text]: \"\"\"Trains an NLU model. Args: config: Path", "NLU model using the '--nlu' argument.\" ) return # training NLU only hence", "config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] =", "\" \"Only the templates section has been changed. A new model with \"", "config: Text, training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool = False,", "if the provided domain file exists.\" ) return None if not await file_importer.get_stories():", "fingerprint_comparison.nlu if l not in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since domain", "Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: loop = asyncio.get_event_loop()", "model files. \"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain = await", "_train_path def train_nlu( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None,", "for lang in config: if config[lang]: print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE)", "Please provide stories in order to \" \"train a Rasa Core model using", "await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain valid NLU data in", "only hence the training files still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config(", "fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, loop:", "_train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None,", "nlu_data.is_empty(): # print_error( # \"No training data given. Please provide stories and NLU", "the provided domain file exists.\" ) return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter,", "NLU was trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name,", "_train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did", "force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a", "training data should be persisted with the model. additional_arguments: Additional training parameters. 
Returns:", "bf mod > if fingerprint_comparison.nlu == True: # replace True with list of", "print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text,", "Path to the Core training data. output: Output path. train_path: If `None` the", "otherwise the path to the directory with the trained model files. \"\"\" file_importer", "a Rasa Core model using the '--stories' argument.\" ) return return await _train_core_with_validated_data(", "file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): # print_error( # \"No training data", "None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer,", "output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did not change.", "data present. Just a Rasa Core model will be trained.\") # return await", "domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async(", "Name of model to be stored. persist_nlu_training_data: `True` if the NLU training data", "temp train path and clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) #", "data should be persisted with the model. additional_arguments: Additional training parameters. Returns: Path", "fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu", "= FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path ) #", "loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def", "model. Args: domain: Path to the domain file. config: Path to the config", "if fingerprint_comparison.nlu == True: # replace True with list of all langs fingerprint_comparison.nlu", "Directory in which to train the model. output_path: Output path. force_training: If `True`", "= TrainingDataImporter.load_from_config( # config, domain, training_files # ) with ExitStack() as stack: train_path", "not in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since domain or stories are", "not changed. fixed_model_name: Name of model to be stored. persist_nlu_training_data: `True` if the", "if train_path is None: # Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer)", "language <{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training", "# /bf mod return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments,", "NLU data in \" # \"order to train a Rasa model using the", "): if not nlu_data: print_error( \"No NLU data given. 
Please provide NLU data", "[l for l in fingerprint_comparison.nlu if l not in nlu_untrainable] if core_untrainable: print_color(\"Skipping", "if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color(", "from rasa.nlu import config as cfg_loader if train_path: # If the train path", "output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool =", "NLU training data should be persisted with the model. Returns: If `train_path` is", "the trained model files. \"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data,", "the model archive, otherwise the path to the directory with the trained model", "-> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path,", "provided directory. fixed_model_name: Name of model to be stored. uncompress: If `True` the", "in the provided directory. fixed_model_name: Name of model to be stored. uncompress: If", "async def train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path:", "skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path", "Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning, print_error, bcolors,", "will not be compressed. additional_arguments: Additional training parameters. Returns: If `train_path` is given", "lang in config: if config[lang]: print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE) _,", "output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core", "= None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, retrain_nlu: Union[bool, List[Text]]", "await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison =", "was trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"nlu-\",", "= stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain,", "return # training NLU only hence the training files still have to be", "model using the '--stories' argument.\" ) return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path,", "# if nlu_data.is_empty(): # print_warning(\"No NLU data present. Just a Rasa Core model", "fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu( config: Text, nlu_data: Text, output: Text,", "valid domain file was found. 
Only an nlu-model was created.\" \"Please specify a", "additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if loop", "persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None,", "= False, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Rasa model", "train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, )", "Union[Domain, Text], config: Text, stories: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name:", "path. train_path: If `None` the model will be trained in a temporary directory,", "output_path, fixed_model_name # ) # /bf mod return await _train_async_internal( file_importer, train_path, output_path,", "Name of model to be stored. additional_arguments: Additional training parameters. Returns: Path of", "given. Please provide stories in order to \" \"train a Rasa Core model", "models = {} from rasa.nlu import config as cfg_loader if train_path: # If", "will be trained.\") # return await _train_core_with_validated_data( # file_importer, # output=output_path, # fixed_model_name=fixed_model_name,", "parameters. Returns: If `train_path` is given it returns the path to the model", "force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ):", "domain file was found. \" \"Please specify a valid domain using '--domain' argument", "provided, do nothing on exit. _train_path = train_path else: # Otherwise, create a", "Union, Dict from rasa.importers.importer import TrainingDataImporter from rasa import model from rasa.model import", "Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop]", "supplies the training data. train_path: Directory in which to train the model. output_path:", "fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did not change. No need", "NLU data in order to train \" \"a Rasa NLU model using the", "of model to be stored. uncompress: If `True` the model will not be", "# bf mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) #", "stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await file_importer.get_nlu_config(retrain_nlu) for lang in config: if", "await file_importer.get_stories(): print_error( \"No stories given. Please provide stories in order to \"", "be skipped now.\" ) return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data,", "Use only from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training data. train_path:", "was skipped because no valid domain file was found. 
\" \"Please specify a", "loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async", "await file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name", "= train_path else: # Otherwise, create a temp train path and clean it", "/bf mod return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, )", "Dict from rasa.importers.importer import TrainingDataImporter from rasa import model from rasa.model import FingerprintComparisonResult", "new_fingerprint = await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training:", "Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Use only from `train_async`. Args:", "_train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text]", "fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty()", "the model. output_path: Output path. force_training: If `True` retrain model even if data", "provided domain file exists.\" ) return None if not await file_importer.get_stories(): print_error( \"No", "fixed_model_name: Name of model to be stored. uncompress: If `True` the model will", "output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path:", "return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data(", "it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await", "Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Core", "exists.\" ) return None if not await file_importer.get_stories(): print_error( \"No stories given. Please", "bool = False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) ->", "the training data. train_path: Directory in which to train the model. output_path: Output", "for language <{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model", "If `True` retrain model even if data has not changed. fixed_model_name: Name of", "files. 
\"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain = await file_importer.get_domain()", "bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU).", "output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path is", "file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not", "async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text]", ") async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name:", "config, domain, [stories] ) domain = await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training", "`True` if the NLU training data should be persisted with the model. additional_arguments:", "the path to the directory with the trained model files. \"\"\" file_importer =", "domain = await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was skipped because no", "training data for Core and NLU. output_path: Output path. force_training: If `True` retrain", "else: print_color( \"NLU data/configuration did not change. No need to retrain NLU model.\",", "NLU. Keys are language codes training_files: Paths to the training data for Core", "persisted with the model. fixed_model_name: Name of model to be stored. additional_arguments: Additional", "Core model will be trained.\") # return await _train_core_with_validated_data( # file_importer, # output=output_path,", "= list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable", "config data.\"\"\" import rasa.core.train with ExitStack() as stack: if train_path: # If the", "exit. _train_path = train_path else: # Otherwise, create a temp train path and", "data format. 
\" \"The NLU model training will be skipped now.\" ) return", "fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a Rasa model", "argument.\" ) return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async", "a Rasa model using the '--data' argument.\" # ) # return # if", "BotfrontFileImporter(config, domain, training_files) # domain = await file_importer.get_domain() # if domain.is_empty(): # return", "contextlib import ExitStack from typing import Text, Optional, List, Union, Dict from rasa.importers.importer", "import ( print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME", "fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments,", "provided directory. fixed_model_name: Name of the model to be stored. persist_nlu_training_data: `True` if", "persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain, Text], config: Dict[Text, Text],", "Args: file_importer: `TrainingDataImporter` which supplies the training data. train_path: Directory in which to", "True: # replace True with list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys())", "fingerprint_comparison.nlu == True: # replace True with list of all langs fingerprint_comparison.nlu =", "rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils", "archive, otherwise the path to the directory with the trained model files. \"\"\"", "= None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Core model.", "fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments:", "False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None,", "Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ) ->", "import TrainingDataImporter from rasa import model from rasa.model import FingerprintComparisonResult from rasa.core.domain import", "None: try: loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete(", "the config for Core and NLU. Keys are language codes training_files: Paths to", "change. No need to retrain Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data(", "training_files: Paths to the training data for Core and NLU. output_path: Output path.", "if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain valid NLU data in it. 
\"", "persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result", "-> Optional[Text]: if loop is None: try: loop = asyncio.get_event_loop() except RuntimeError: loop", "= None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data(", "Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train Core with validated training and config", "return loop.run_until_complete( _train_nlu_async( config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, ) ) async def", ") ) async def train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text,", "print_error( \"No NLU data given. Please provide NLU data in order to train", "force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path ) # bf mod > if", "the train path was provided, do nothing on exit. _train_path = train_path else:", "data has not changed. persist_nlu_training_data: `True` if the NLU training data should be", "config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, ) ) async def _train_nlu_async( config: Text,", "mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only NLU", "in fingerprint_comparison.nlu if l not in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since", "_train_core_with_validated_data( # file_importer, # output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint =", "return None if not await file_importer.get_stories(): print_error( \"No stories given. Please provide stories", "config: Dict of paths to the config for Core and NLU. Keys are", "-> Optional[Text]: \"\"\"Trains a Core model. Args: domain: Path to the domain file.", "output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name,", "is None: # Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model(", "data should be persisted with the model. Returns: If `train_path` is given it", "= model.should_retrain( new_fingerprint, old_model, train_path ) # bf mod > if fingerprint_comparison.nlu ==", "file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(\"NLU data for language <{}> didn't change,", "\"a Rasa NLU model using the '--nlu' argument.\" ) return # training NLU", "domain file was found. Only an nlu-model was created.\" \"Please specify a valid", "import tempfile from contextlib import ExitStack from typing import Text, Optional, List, Union,", "updated templates will be created.\", color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core", "model (Core and NLU). Args: domain: Path to the domain file. 
config: Dict", "persist_nlu_training_data: bool = False, ): if not nlu_data: print_error( \"No NLU data given.", "= None, ) -> Optional[Text]: \"\"\"Trains a Core model. Args: domain: Path to", ") # return # if nlu_data.is_empty(): # print_warning(\"No NLU data present. Just a", "training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't contain valid NLU", "fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path )", "color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path is None:", "def train_nlu( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name:", "file_importer, output_path, fixed_model_name # ) # /bf mod return await _train_async_internal( file_importer, train_path,", "for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await", "= None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ): if not", "stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer: TrainingDataImporter, output_path:", "domain: Union[Domain, Text], config: Text, stories: Text, output: Text, train_path: Optional[Text] = None,", "persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only =", "be stored. additional_arguments: Additional training parameters. Returns: Path of the trained model archive.", "NLU data present. Just a Rasa Core model will be trained.\") # return", "output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu( config: Text, nlu_data: Text,", "order to train \" \"a Rasa NLU model using the '--nlu' argument.\" )", "= None, persist_nlu_training_data: bool = False, ): if not nlu_data: print_error( \"No NLU", "file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments:", "Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ): if", "a Core model. Args: domain: Path to the domain file. config: Path to", "training data. output: Output path. train_path: If `None` the model will be trained", "nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, ) ) async def _train_nlu_async( config: Text, nlu_data:", "_train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists( file_importer:", "config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}' doesn't", "retrain model even if data has not changed. fixed_model_name: Name of model to", "loop is None: try: loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop)", "training...\".format(lang), color=bcolors.OKBLUE) # /bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path is", "otherwise in the provided directory. 
fixed_model_name: Name of model to be stored. uncompress:", "NLU model training will be skipped now.\" ) return return await _train_nlu_with_validated_data( file_importer,", "NLU. output_path: Output path. force_training: If `True` retrain model even if data has", "return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You can use", "def _do_training( file_importer: TrainingDataImporter, output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name:", "NLU). Use only from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training data.", "persist_nlu_training_data: `True` if the NLU training data should be persisted with the model.", "additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files:", "be trained in a temporary directory, otherwise in the provided directory. fixed_model_name: Name", "training parameters. Returns: Path of the trained model archive. \"\"\" # file_importer =", "fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: loop =", "( print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def", "not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path ) # bf mod >", "if nlu_data.is_empty(): # print_warning(\"No NLU data present. Just a Rasa Core model will", "await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments,", "# ) # return # if nlu_data.is_empty(): # print_warning(\"No NLU data present. Just", "only from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training data. train_path: Directory", "output: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data:", "# Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output,", "created.\" \"Please specify a valid domain using '--domain' argument or check if the", "= None, ) -> Optional[Text]: if loop is None: try: loop = asyncio.get_event_loop()", "None: # Only NLU was trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint,", "config[lang]: print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await", "not core_untrainable fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu if l not in", "the model. fixed_model_name: Name of model to be stored. additional_arguments: Additional training parameters.", "the model. 
Returns: If `train_path` is given it returns the path to the", "fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a", "in config: if config[lang]: print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang],", "argument or check if the provided domain file exists.\" ) return nlu_model_only async", "the training files still have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data]", "model archive. \"\"\" stories, nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if", "stack: models = {} from rasa.nlu import config as cfg_loader if train_path: #", "None, ) -> Optional[Text]: \"\"\"Trains a Core model. Args: domain: Path to the", ") print_warning( \"Core training was skipped because no valid domain file was found.", "normal (not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain, config = await asyncio.gather(", "domain, [stories] ) domain = await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was", "`True` retrain model even if data has not changed. fixed_model_name: Name of model", "domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH,", "file for Core. stories: Path to the Core training data. output: Output path.", "directory with the trained model files. \"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async(", "model. Returns: If `train_path` is given it returns the path to the model", "as cfg_loader if train_path: # If the train path was provided, do nothing", "up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await file_importer.get_nlu_config(retrain_nlu)", "Output path. train_path: If `None` the model will be trained in a temporary", "templates section has been changed. A new model with \" \"the updated templates", "the training data for Core and NLU. output_path: Output path. force_training: If `True`", "valid NLU data in it. \" \"Please verify the data format. \" \"The", "Path to the config file for Core. stories: Path to the Core training", "was found. \" \"Please specify a valid domain using '--domain' argument or check", "async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text]", "nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core", "file. config: Dict of paths to the config for Core and NLU. Keys", "data. train_path: Directory in which to train the model. output_path: Output path. force_training:", "nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): #", "= await file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer, output_path,", "using '--domain' argument or check if the provided domain file exists.\" ) return", "and NLU). Use only from `train_async`. Args: file_importer: `TrainingDataImporter` which supplies the training", "be persisted with the model. fixed_model_name: Name of model to be stored. 
additional_arguments:", "color=bcolors.OKBLUE) domain, config = await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer,", "# bf mod config = await file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]:", ") return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path: Text, force_training:", "Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name,", "rasa.nlu.train with ExitStack() as stack: models = {} from rasa.nlu import config as", ") -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output,", "data. output: Output path. train_path: If `None` the model will be trained in", "fixed_model_name: Name of model to be stored. persist_nlu_training_data: `True` if the NLU training", "`True` retrain model even if data has not changed. persist_nlu_training_data: `True` if the", "_train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None,", "replace True with list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain =", "not change. No need to retrain NLU model.\", color=bcolors.OKBLUE, ) def train_core( domain:", "tempfile from contextlib import ExitStack from typing import Text, Optional, List, Union, Dict", "training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path", "change. No need to retrain NLU model.\", color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain,", "data in it. \" \"Please verify the data format. \" \"The NLU model", "NLU training data should be persisted with the model. additional_arguments: Additional training parameters.", "output_path: Text, force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]:", "if config[lang]: print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ =", "additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain, Text], config: Text, stories: Text,", "async def train_core_async( domain: Union[Domain, Text], config: Text, stories: Text, output: Text, train_path:", "rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) # domain = await file_importer.get_domain()", "= None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]:", "None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains", ") -> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Args: domain: Path", ") -> Optional[Text]: \"\"\"Train Core with validated training and config data.\"\"\" import rasa.core.train", "_train_path = train_path else: # Otherwise, create a temp train path and clean", "If the train path was provided, do nothing on exit. 
_train_path = train_path", "fixed_model_name, persist_nlu_training_data, ) ) async def _train_nlu_async( config: Text, nlu_data: Text, output: Text,", "path. force_training: If `True` retrain model even if data has not changed. persist_nlu_training_data:", "if data has not changed. fixed_model_name: Name of model to be stored. persist_nlu_training_data:", "training NLU only hence the training files still have to be selected file_importer", "await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def handle_domain_if_not_exists(", "train_path is None: # Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer) return", "created.\", color=bcolors.OKBLUE, ) await model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core stories/configuration did not change.", "await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration", "to train \" \"a Rasa NLU model using the '--nlu' argument.\" ) return", "the provided domain file exists.\" ) return None if not await file_importer.get_stories(): print_error(", "if domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name # ) #", "Additional training parameters. Returns: Path of the trained model archive. \"\"\" # file_importer", "= asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, ) )", "in which to train the model. output_path: Output path. force_training: If `True` retrain", "it returns the path to the model archive, otherwise the path to the", "model.\", color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain, Text], config: Text, stories: Text, output:", "Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu,", "file_importer = BotfrontFileImporter(config, domain, training_files) # domain = await file_importer.get_domain() # if domain.is_empty():", "asyncio import os import tempfile from contextlib import ExitStack from typing import Text,", "# print_warning(\"No NLU data present. Just a Rasa Core model will be trained.\")", "# bf mod > if fingerprint_comparison.nlu == True: # replace True with list", "'--stories' argument.\" ) return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, )", "model. output_path: Output path. 
force_training: If `True` retrain model even if data has", "train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, )", "Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training:", "= asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config,", "domain.is_empty() or stories.is_empty() nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()]", "and not core_untrainable fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu if l not", "await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was skipped because no valid domain", "trained in a temporary directory, otherwise in the provided directory. fixed_model_name: Name of", "fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core training", "of model to be stored. additional_arguments: Additional training parameters. Returns: Path of the", "verify the data format. \" \"The NLU model training will be skipped now.\"", "train_path: Directory in which to train the model. output_path: Output path. force_training: If", "TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning(", "Rasa model (Core and NLU). Args: domain: Path to the domain file. config:", "try: loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async(", "file was found. \" \"Please specify a valid domain using '--domain' argument or", "Text], config: Text, stories: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text]", "will be skipped now.\" ) return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name,", "None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ): if not fingerprint_comparison_result:", "else: print_color( \"Core stories/configuration did not change. No need to retrain Core model.\",", "No need to retrain NLU model.\", color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain, Text],", "the config file for NLU. nlu_data: Path to the NLU training data. output:", "and NLU). Args: domain: Path to the domain file. config: Dict of paths", "data given. 
Please provide stories and NLU data in \" # \"order to", "= None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return", ") return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def", "Text, Optional, List, Union, Dict from rasa.importers.importer import TrainingDataImporter from rasa import model", "train_path: Text, output_path: Text, force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict], )", "persist_nlu_training_data, ) ) async def _train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path:", "now.\" ) return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async", "argument or check if the provided domain file exists.\" ) return None if", "since domain or stories are empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No NLU", "retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]: \"\"\"Train NLU with validated training", "return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path: Text, force_training: bool,", "using the '--nlu' argument.\" ) return # training NLU only hence the training", "of the trained model archive. \"\"\" # file_importer = TrainingDataImporter.load_from_config( # config, domain,", "skipped now.\" ) return return await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, )", "print_color(\"No NLU data found for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf", "file. config: Path to the config file for Core. stories: Path to the", "with the model. Returns: If `train_path` is given it returns the path to", "the provided directory. fixed_model_name: Name of model to be stored. uncompress: If `True`", "a temporary directory, otherwise in the provided directory. fixed_model_name: Name of model to", "domain, training_files # ) with ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf", "Name of model to be stored. uncompress: If `True` the model will not", "additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train Core with validated training and", "data.\"\"\" import rasa.core.train with ExitStack() as stack: if train_path: # If the train", "to the directory with the trained model files. \"\"\" loop = asyncio.get_event_loop() return", "stored. 
persist_nlu_training_data: `True` if the NLU training data should be persisted with the", "config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training: bool =", "fixed_model_name # ) # /bf mod return await _train_async_internal( file_importer, train_path, output_path, force_training,", "fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison", "found for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required():", "changed. You can use the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return", "new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return", "= await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"nlu-\", ) return _train_path", "print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only NLU was", "config file for NLU. nlu_data: Path to the NLU training data. output: Output", "\"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer: TrainingDataImporter, output_path: Text, train_path: Text,", "FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg():", "else: print_color(\"NLU data for language <{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) # /bf", "trained model files. \"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain =", "training parameters. Returns: If `train_path` is given it returns the path to the", "print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only Core was", "because no valid domain file was found. \" \"Please specify a valid domain", "new model with \" \"the updated templates will be created.\", color=bcolors.OKBLUE, ) await", "check if the provided domain file exists.\" ) return None if not await", "If `train_path` is given it returns the path to the model archive, otherwise", "config: Path to the config file for NLU. nlu_data: Path to the NLU", "Additional training parameters. Returns: Path of the trained model archive. \"\"\" stories, nlu_data", "or stories.is_empty() nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core", "\"Only the templates section has been changed. A new model with \" \"the", "# \"order to train a Rasa model using the '--data' argument.\" # )", "stories given. Please provide stories in order to \" \"train a Rasa Core", "it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training", "print_error( # \"No training data given. 
Please provide stories and NLU data in", "if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path,", ") elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change. \" \"Only the templates", "train \" \"a Rasa NLU model using the '--nlu' argument.\" ) return #", "path to the directory with the trained model files. \"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config(", "Paths to the training data for Core and NLU. output_path: Output path. force_training:", "not nlu_data: print_error( \"No NLU data given. Please provide NLU data in order", "in nlu_untrainable] if core_untrainable: print_color(\"Skipping Core training since domain or stories are empty.\",", "l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable fingerprint_comparison.nlu", "model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You can use the", "Optional[Text]: if loop is None: try: loop = asyncio.get_event_loop() except RuntimeError: loop =", "TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants", "model training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only Core was trained.", "Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if loop is None: try: loop =", "config for Core and NLU. Keys are language codes training_files: Paths to the", "Core with validated training and config data.\"\"\" import rasa.core.train with ExitStack() as stack:", "await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name # ) # /bf mod return await", "specify a valid domain using '--domain' argument or check if the provided domain", "is given it returns the path to the model archive, otherwise the path", "be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if", "new_fingerprint, old_model, train_path ) # bf mod > if fingerprint_comparison.nlu == True: #", "train_path else: # Otherwise, create a temp train path and clean it up", "nlu_data.is_empty(): # print_warning(\"No NLU data present. Just a Rasa Core model will be", "model...\", color=bcolors.OKBLUE) domain, config = await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain,", "nlu_data: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data:", "# replace True with list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain", "train the model. output_path: Output path. force_training: If `True` retrain model even if", "domain file exists.\" ) return None if not await file_importer.get_stories(): print_error( \"No stories", "to the config file for NLU. 
nlu_data: Path to the NLU training data.", "\" \"The NLU model training will be skipped now.\" ) return return await", "handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name", "the NLU training data. output: Output path. train_path: If `None` the model will", "FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint, old_model, train_path ) # bf", "No need to retrain Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer,", "the NLU training data should be persisted with the model. fixed_model_name: Name of", "Only NLU was trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path,", "file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name #", "= await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core training was skipped because", ") return _train_path def train_nlu( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text]", "Text, config: Text, training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool =", "archive. \"\"\" # file_importer = TrainingDataImporter.load_from_config( # config, domain, training_files # ) with", "output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain, Text], config:", "async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data( file_importer,", "nothing on exit. _train_path = train_path else: # Otherwise, create a temp train", "file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]: print_color(\"Start training {} NLU model ...\".format(lang),", "exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE)", "await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint,", "Text, stories: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None,", "import ExitStack from typing import Text, Optional, List, Union, Dict from rasa.importers.importer import", "`True` if the NLU training data should be persisted with the model. Returns:", "or stories are empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No NLU data found", "Rasa model using the '--data' argument.\" # ) # return # if nlu_data.is_empty():", "Optional, List, Union, Dict from rasa.importers.importer import TrainingDataImporter from rasa import model from", "to the config for Core and NLU. 
Keys are language codes training_files: Paths", "False, ): if not nlu_data: print_error( \"No NLU data given. Please provide NLU", "= None, ) -> Optional[Text]: \"\"\"Train Core with validated training and config data.\"\"\"", "stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain, config =", "return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async def", "old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if not force_training: fingerprint_comparison = model.should_retrain( new_fingerprint,", "train( domain: Text, config: Text, training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training:", "rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files: Union[Text, List[Text]],", "= asyncio.get_event_loop() return loop.run_until_complete( train_core_async( domain=domain, config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, )", "rasa.cli.utils import ( print_success, print_warning, print_error, bcolors, print_color, ) from rasa.constants import DEFAULT_MODELS_PATH,", "skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path,", "import Text, Optional, List, Union, Dict from rasa.importers.importer import TrainingDataImporter from rasa import", "TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error( f\"Path '{nlu_data}'", "Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Use only", "color=bcolors.OKBLUE) # </ bf mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison,", "on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training print_color(\"Training Core model...\",", "Optional[Text] = None, persist_nlu_training_data: bool = False, ): if not nlu_data: print_error( \"No", "paths to the config for Core and NLU. Keys are language codes training_files:", "\"The NLU model training will be skipped now.\" ) return return await _train_nlu_with_validated_data(", "path to the directory with the trained model files. \"\"\" loop = asyncio.get_event_loop()", "model archive. \"\"\" # file_importer = TrainingDataImporter.load_from_config( # config, domain, training_files # )", ") print_success( \"Nothing changed. You can use the old model stored at '{}'.\"", "\"\"\"Trains a Core model. Args: domain: Path to the domain file. config: Path", "mod return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data, additional_arguments, ) async", "additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: loop = asyncio.get_event_loop() return loop.run_until_complete( train_core_async(", "path and clean it up on exit. 
_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not", "rasa.importers.importer import TrainingDataImporter from rasa import model from rasa.model import FingerprintComparisonResult from rasa.core.domain", "using the '--stories' argument.\" ) return return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name,", "_train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output:", "exists.\" ) return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path: Text,", "config file for Core. stories: Path to the Core training data. output: Output", "= DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool =", "not await file_importer.get_stories(): print_error( \"No stories given. Please provide stories in order to", "handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name # ) # /bf mod return await _train_async_internal(", "file exists.\" ) return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path: Text, output_path:", "\"\"\" stories, nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and", "has been changed. A new model with \" \"the updated templates will be", "= None, persist_nlu_training_data: bool = False, retrain_nlu: Union[bool, List[Text]] = True ) ->", "fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] = None, ):", "# ) new_fingerprint = await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison = FingerprintComparisonResult(force_training=force_training) if", "= {} from rasa.nlu import config as cfg_loader if train_path: # If the", "fingerprint_comparison.core and not core_untrainable fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu if l", "nlu_data: print_error( \"No NLU data given. Please provide NLU data in order to", "domain=domain, config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain:", ") # /bf mod return await _train_async_internal( file_importer, train_path, output_path, force_training, fixed_model_name, persist_nlu_training_data,", "Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training: bool", "= await file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was skipped because no valid", "langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty() or", "to be stored. additional_arguments: Additional training parameters. 
Returns: Path of the trained model", "= None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ) -> Optional[Text]:", "additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You", "print_success( \"Nothing changed. You can use the old model stored at '{}'.\" \"\".format(os.path.abspath(old_model))", ") return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You can", "await _train_core_with_validated_data( # file_importer, # output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint", "import TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning, print_error, bcolors, print_color, ) from", "NLU training data. output: Output path. train_path: If `None` the model will be", "file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core training was skipped because no valid domain", "file_importer.get_stories(): print_error( \"No stories given. Please provide stories in order to \" \"train", "ExitStack() as stack: train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod from rasa_addons.importers import BotfrontFileImporter", "additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Core model. Args: domain:", ") else: print_color(\"NLU data for language <{}> didn't change, skipping training...\".format(lang), color=bcolors.OKBLUE) #", "= await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): # print_error(", "return await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer:", "clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare) training", "model to be stored. uncompress: If `True` the model will not be compressed.", "NLU with validated training and config data.\"\"\" import rasa.nlu.train with ExitStack() as stack:", "None, ) -> Optional[Text]: if loop is None: try: loop = asyncio.get_event_loop() except", "TrainingDataImporter.load_core_importer_from_config( config, domain, [stories] ) domain = await file_importer.get_domain() if domain.is_empty(): print_error( \"Core", "import model from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import", "# if domain.is_empty(): # return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name # )", "policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path is None: #", "def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] =", "additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change. 
\" \"Only the", "-> Optional[Text]: \"\"\"Train Core with validated training and config data.\"\"\" import rasa.core.train with", "list of all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain() core_untrainable", "False, additional_arguments: Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if", "with the model. fixed_model_name: Name of model to be stored. additional_arguments: Additional training", "= [l for l in fingerprint_comparison.nlu if l not in nlu_untrainable] if core_untrainable:", "> if fingerprint_comparison.nlu == True: # replace True with list of all langs", "Dict of paths to the config for Core and NLU. Keys are language", "loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name,", "persist_nlu_training_data: bool = False, retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]: \"\"\"Train", "model will be trained.\") # return await _train_core_with_validated_data( # file_importer, # output=output_path, #", "at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer: TrainingDataImporter, output_path: Text,", "color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain, Text], config: Text, stories: Text, output: Text,", "training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only NLU was trained new_fingerprint", "DEFAULT_CORE_SUBDIRECTORY_NAME def train( domain: Text, config: Text, training_files: Union[Text, List[Text]], output: Text =", "Core training since domain or stories are empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable:", "Optional[Dict] = None, ): if not fingerprint_comparison_result: fingerprint_comparison_result = FingerprintComparisonResult() if fingerprint_comparison_result.should_retrain_core(): await", "output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change.", "train_path, fixed_model_name, persist_nlu_training_data, ) ) async def _train_nlu_async( config: Text, nlu_data: Text, output:", "`True` the model will not be compressed. additional_arguments: Additional training parameters. 
Returns: If", "trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"nlu-\", )", "False, retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]: \"\"\"Train NLU with validated", "train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu( config: Text, nlu_data: Text, output:", "if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color(", "_train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output:", "train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool =", "model training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only NLU was trained", "NLU data found for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </ bf mod", "color=bcolors.OKBLUE) if train_path is None: # Only NLU was trained new_fingerprint = await", "NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await rasa.nlu.train( config[lang], file_importer, _train_path,", "config=config, stories=stories, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain,", "def _train_nlu_async( config: Text, nlu_data: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name:", "await _train_core_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter,", "return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) )", "bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict], ) -> Optional[Text]: \"\"\"Trains a Rasa", "Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path,", "additional_arguments: Additional training parameters. Returns: Path of the trained model archive. \"\"\" stories,", "'--data' argument.\" # ) # return # if nlu_data.is_empty(): # print_warning(\"No NLU data", "are empty.\", color=bcolors.OKBLUE) for lang in nlu_untrainable: print_color(\"No NLU data found for language", "need to retrain Core model.\", color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path,", "path. force_training: If `True` retrain model even if data has not changed. 
fixed_model_name:", "= None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict] =", "to the directory with the trained model files. \"\"\" file_importer = TrainingDataImporter.load_core_importer_from_config( config,", "= await file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]: print_color(\"Start training {} NLU", "core_untrainable: print_color(\"Skipping Core training since domain or stories are empty.\", color=bcolors.OKBLUE) for lang", "file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await file_importer.get_nlu_data() if training_datas.is_empty(): print_error(", "and NLU. Keys are language codes training_files: Paths to the training data for", "stories, nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty():", "domain, config = await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path,", "file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did not", "be stored. uncompress: If `True` the model will not be compressed. additional_arguments: Additional", "\"Core stories/configuration did not change. No need to retrain Core model.\", color=bcolors.OKBLUE, )", "to train the model. output_path: Output path. force_training: If `True` retrain model even", "config = await file_importer.get_nlu_config(retrain_nlu) for lang in config: if config[lang]: print_color(\"Start training {}", "fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model(", "(not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain, config = await asyncio.gather( file_importer.get_domain(),", "trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", )", "): nlu_model_only = await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core training was", "# output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer) old_model", "Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Core model. Args: domain: Path", "`True` if the NLU training data should be persisted with the model. fixed_model_name:", "stories/configuration did not change. \" \"Only the templates section has been changed. 
A", "</ bf mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data,", ") ) async def train_core_async( domain: Union[Domain, Text], config: Text, stories: Text, output:", "Args: config: Path to the config file for NLU. nlu_data: Path to the", "nlu_untrainable: print_color(\"No NLU data found for language <{}>, skipping training...\".format(lang), color=bcolors.OKBLUE) # </", "bool = False, ): if not nlu_data: print_error( \"No NLU data given. Please", "validated training and config data.\"\"\" import rasa.nlu.train with ExitStack() as stack: models =", "config = await asyncio.gather( file_importer.get_domain(), file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME),", "is None: # Only NLU was trained new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model(", "model.package_model( fingerprint=new_fingerprint, output_directory=output, train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix=\"core-\", ) return _train_path def train_nlu( config: Text,", "None, fixed_model_name: Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train", ") -> Optional[Text]: \"\"\"Train NLU with validated training and config data.\"\"\" import rasa.nlu.train", "from rasa.model import FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from", "fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text] =", "a temporary directory, otherwise in the provided directory. fixed_model_name: Name of the model", "file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter, output: Text,", "domain: Path to the domain file. config: Path to the config file for", "be compressed. additional_arguments: Additional training parameters. Returns: If `train_path` is given it returns", "file_importer, # output=output_path, # fixed_model_name=fixed_model_name, # additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer)", "file_importer = TrainingDataImporter.load_from_config( # config, domain, training_files # ) with ExitStack() as stack:", "fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change. \" \"Only the templates section has", "persist_nlu_training_data=persist_nlu_training_data, ) else: print_color( \"NLU data/configuration did not change. No need to retrain", "given. Please provide NLU data in order to train \" \"a Rasa NLU", "await _train_nlu_with_validated_data( file_importer, output=output, train_path=train_path, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, ) async def _train_nlu_with_validated_data( file_importer: TrainingDataImporter,", "of the model to be stored. 
persist_nlu_training_data: `True` if the NLU training data", "None, ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Args: domain:", "the domain file. config: Path to the config file for Core. stories: Path", ") # bf mod > if fingerprint_comparison.nlu == True: # replace True with", "persisted with the model. Returns: If `train_path` is given it returns the path", "directory. fixed_model_name: Name of the model to be stored. persist_nlu_training_data: `True` if the", "# \"No training data given. Please provide stories and NLU data in \"", "Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, retrain_nlu: Union[bool,", "force_training: If `True` retrain model even if data has not changed. fixed_model_name: Name", "# file_importer, output_path, fixed_model_name # ) # /bf mod return await _train_async_internal( file_importer,", "-> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Use only from `train_async`.", "domain: Path to the domain file. config: Dict of paths to the config", "temporary directory, otherwise in the provided directory. fixed_model_name: Name of model to be", "directory, otherwise in the provided directory. fixed_model_name: Name of the model to be", "stories: Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, additional_arguments:", "file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): # print_error( # \"No training", "output_path: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data:", "model will be trained in a temporary directory, otherwise in the provided directory.", "-> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Args: domain: Path to", "output: Output path. train_path: If `None` the model will be trained in a", "training data given. Please provide stories and NLU data in \" # \"order", "import asyncio import os import tempfile from contextlib import ExitStack from typing import", "a Rasa Core model will be trained.\") # return await _train_core_with_validated_data( # file_importer,", "mod if fingerprint_comparison.is_training_required(): await _do_training( file_importer, output_path=output_path, train_path=train_path, fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, )", "await _train_core_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration", "doesn't contain valid NLU data in it. \" \"Please verify the data format.", "Optional[Text] = None, persist_nlu_training_data: bool = False, ) -> Optional[Text]: \"\"\"Trains an NLU", "= False, retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]: \"\"\"Train NLU with", "= None, ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Args:", "changed. fixed_model_name: Name of model to be stored. 
persist_nlu_training_data: `True` if the NLU", "force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def train_async( domain: Union[Domain, Text], config:", "if the provided domain file exists.\" ) return nlu_model_only async def _train_async_internal( file_importer:", "async def _do_training( file_importer: TrainingDataImporter, output_path: Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None,", "print_warning( \"Core training was skipped because no valid domain file was found. Only", "stories.is_empty() and nlu_data.is_empty(): # print_error( # \"No training data given. Please provide stories", "Just a Rasa Core model will be trained.\") # return await _train_core_with_validated_data( #", ") async def train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]],", "_train_nlu_async( config, nlu_data, output, train_path, fixed_model_name, persist_nlu_training_data, ) ) async def _train_nlu_async( config:", "fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change. \" \"Only", "not be compressed. additional_arguments: Additional training parameters. Returns: If `train_path` is given it", "Optional[Text]: \"\"\"Train NLU with validated training and config data.\"\"\" import rasa.nlu.train with ExitStack()", "with ExitStack() as stack: if train_path: # If the train path was provided,", "with the model. additional_arguments: Additional training parameters. Returns: Path of the trained model", "additional_arguments, ) async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await", "Path of the trained model archive. \"\"\" stories, nlu_data = await asyncio.gather( file_importer.get_stories(),", "None: # Only Core was trained. new_fingerprint = await model.model_fingerprint(file_importer) return model.package_model( fingerprint=new_fingerprint,", "old model stored at '{}'.\" \"\".format(os.path.abspath(old_model)) ) return old_model async def _do_training( file_importer:", "Output path. force_training: If `True` retrain model even if data has not changed.", "fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You can use the old", "Rasa NLU model using the '--nlu' argument.\" ) return # training NLU only", "of the trained model archive. \"\"\" stories, nlu_data = await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data()", "and clean it up on exit. 
_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal (not compare)", "loop = asyncio.get_event_loop() except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain,", "loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]: if loop is None: try: loop", "train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ):", "= True ) -> Optional[Text]: \"\"\"Train NLU with validated training and config data.\"\"\"", "to train a Rasa model using the '--data' argument.\" # ) # return", "Optional[Text]: \"\"\"Train Core with validated training and config data.\"\"\" import rasa.core.train with ExitStack()", ") def train_core( domain: Union[Domain, Text], config: Text, stories: Text, output: Text, train_path:", "elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change. \" \"Only the templates section", "import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import ( print_success, print_warning, print_error,", "path was provided, do nothing on exit. _train_path = train_path else: # Otherwise,", "config as cfg_loader if train_path: # If the train path was provided, do", "train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) ) async def", "Optional[Union[Text, List[Text]]], output_path: Text = DEFAULT_MODELS_PATH, force_training: bool = False, fixed_model_name: Optional[Text] =", "training was skipped because no valid domain file was found. Only an nlu-model", "in the provided directory. fixed_model_name: Name of the model to be stored. persist_nlu_training_data:", "mod from rasa_addons.importers import BotfrontFileImporter file_importer = BotfrontFileImporter(config, domain, training_files) # domain =", "asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments,", "except RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output,", "fingerprint_comparison_result=fingerprint_comparison, fixed_model_name=fixed_model_name, persist_nlu_training_data=persist_nlu_training_data, additional_arguments=additional_arguments, ) return model.package_model( fingerprint=new_fingerprint, output_directory=output_path, train_path=train_path, fixed_model_name=fixed_model_name, ) print_success(", "DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path is None:", "# domain = await file_importer.get_domain() # if domain.is_empty(): # return await handle_domain_if_not_exists( #", "old_model, train_path ) # bf mod > if fingerprint_comparison.nlu == True: # replace", "\" \"Please verify the data format. \" \"The NLU model training will be", "training data should be persisted with the model. 
Returns: If `train_path` is given", "await asyncio.gather( file_importer.get_stories(), file_importer.get_nlu_data() ) # if stories.is_empty() and nlu_data.is_empty(): # print_error( #", "have to be selected file_importer = TrainingDataImporter.load_nlu_importer_from_config( config, training_data_paths=[nlu_data] ) training_datas = await", "fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) ) async def train_core_async( domain: Union[Domain, Text], config: Text, stories:", "exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # bf mod config = await file_importer.get_nlu_config(retrain_nlu) for lang", "stored. uncompress: If `True` the model will not be compressed. additional_arguments: Additional training", "the NLU training data should be persisted with the model. Returns: If `train_path`", "TrainingDataImporter, train_path: Text, output_path: Text, force_training: bool, fixed_model_name: Optional[Text], persist_nlu_training_data: bool, additional_arguments: Optional[Dict],", "trained model files. \"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete( _train_nlu_async( config, nlu_data, output,", "color=bcolors.OKBLUE, ) if fingerprint_comparison_result.should_retrain_nlu(): await _train_nlu_with_validated_data( file_importer, output=output_path, train_path=train_path, fixed_model_name=fixed_model_name, retrain_nlu=fingerprint_comparison_result.nlu, persist_nlu_training_data=persist_nlu_training_data, )", "train_path: If `None` the model will be trained in a temporary directory, otherwise", "await model.update_model_with_new_domain(file_importer, train_path) else: print_color( \"Core stories/configuration did not change. No need to", "will be trained in a temporary directory, otherwise in the provided directory. fixed_model_name:", "model even if data has not changed. fixed_model_name: Name of model to be", "ExitStack() as stack: if train_path: # If the train path was provided, do", "file_importer.get_config() ) await rasa.core.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), policy_config=config, additional_arguments=additional_arguments, ) print_color(\"Core model", "and NLU. output_path: Output path. force_training: If `True` retrain model even if data", "False, ) -> Optional[Text]: \"\"\"Trains an NLU model. Args: config: Path to the", "# normal (not compare) training print_color(\"Training Core model...\", color=bcolors.OKBLUE) domain, config = await", "Core. stories: Path to the Core training data. output: Output path. train_path: If", "additional_arguments: Additional training parameters. Returns: Path of the trained model archive. \"\"\" #", "bool = False, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, additional_arguments: Optional[Dict]", "train path and clean it up on exit. _train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp())) # normal", "None, persist_nlu_training_data: bool = False, ) -> Optional[Text]: \"\"\"Trains an NLU model. Args:", "print_color( \"Core stories/configuration did not change. 
No need to retrain Core model.\", color=bcolors.OKBLUE,", "return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name # ) # /bf mod return", "RuntimeError: loop = asyncio.new_event_loop() asyncio.set_event_loop(loop) return loop.run_until_complete( train_async( domain=domain, config=config, training_files=training_files, output_path=output, force_training=force_training,", "provided domain file exists.\" ) return nlu_model_only async def _train_async_internal( file_importer: TrainingDataImporter, train_path:", "List, Union, Dict from rasa.importers.importer import TrainingDataImporter from rasa import model from rasa.model", "train_path=train_path, fixed_model_name=fixed_model_name, ) print_success( \"Nothing changed. You can use the old model stored", "to \" \"train a Rasa Core model using the '--stories' argument.\" ) return", "if train_path: # If the train path was provided, do nothing on exit.", "= [l for l, d in nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core and", "argument.\" ) return # training NLU only hence the training files still have", "ExitStack from typing import Text, Optional, List, Union, Dict from rasa.importers.importer import TrainingDataImporter", "print_color( \"NLU data/configuration did not change. No need to retrain NLU model.\", color=bcolors.OKBLUE,", "\"order to train a Rasa model using the '--data' argument.\" # ) #", "in a temporary directory, otherwise in the provided directory. fixed_model_name: Name of model", "NLU data given. Please provide NLU data in order to train \" \"a", "print_color(\"Start training {} NLU model ...\".format(lang), color=bcolors.OKBLUE) _, models[lang], _ = await rasa.nlu.train(", ") -> Optional[Text]: \"\"\"Trains a Core model. Args: domain: Path to the domain", "Core model using the '--stories' argument.\" ) return return await _train_core_with_validated_data( file_importer, output=output,", "False, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core", "stack: if train_path: # If the train path was provided, do nothing on", "Returns: Path of the trained model archive. \"\"\" stories, nlu_data = await asyncio.gather(", "the directory with the trained model files. \"\"\" loop = asyncio.get_event_loop() return loop.run_until_complete(", "to retrain NLU model.\", color=bcolors.OKBLUE, ) def train_core( domain: Union[Domain, Text], config: Text,", "to the model archive, otherwise the path to the directory with the trained", "Path to the domain file. config: Path to the config file for Core.", "if not await file_importer.get_stories(): print_error( \"No stories given. Please provide stories in order", "if data has not changed. persist_nlu_training_data: `True` if the NLU training data should", "print_error( \"No stories given. Please provide stories in order to \" \"train a", "all langs fingerprint_comparison.nlu = list(new_fingerprint.get(\"nlu-config\", {}).keys()) domain = await file_importer.get_domain() core_untrainable = domain.is_empty()", "NLU. nlu_data: Path to the NLU training data. output: Output path. 
train_path: If", "_, models[lang], _ = await rasa.nlu.train( config[lang], file_importer, _train_path, fixed_model_name=\"nlu-{}\".format(lang), persist_nlu_training_data=persist_nlu_training_data, ) else:", "additional_arguments=additional_arguments, ) print_color(\"Core model training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only", "await _train_nlu_with_validated_data( file_importer, output=output_path, fixed_model_name=fixed_model_name ) print_warning( \"Core training was skipped because no", "Core and NLU. Keys are language codes training_files: Paths to the training data", "which to train the model. output_path: Output path. force_training: If `True` retrain model", "Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU).", "/bf mod print_color(\"NLU model training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only", "if core_untrainable: print_color(\"Skipping Core training since domain or stories are empty.\", color=bcolors.OKBLUE) for", "Only an nlu-model was created.\" \"Please specify a valid domain using '--domain' argument", ") async def handle_domain_if_not_exists( file_importer: TrainingDataImporter, output_path, fixed_model_name ): nlu_model_only = await _train_nlu_with_validated_data(", "are language codes training_files: Paths to the training data for Core and NLU.", "Optional[Text]: \"\"\"Trains a Rasa model (Core and NLU). Args: domain: Path to the", "else: # Otherwise, create a temp train path and clean it up on", "import os import tempfile from contextlib import ExitStack from typing import Text, Optional,", "section has been changed. A new model with \" \"the updated templates will", "= None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train Core with validated", "import rasa.nlu.train with ExitStack() as stack: models = {} from rasa.nlu import config", "Optional[Text] = None, additional_arguments: Optional[Dict] = None, ) -> Optional[Text]: \"\"\"Train Core with", "domain, training_files) # domain = await file_importer.get_domain() # if domain.is_empty(): # return await", "# return await handle_domain_if_not_exists( # file_importer, output_path, fixed_model_name # ) # /bf mod", "Returns: Path of the trained model archive. \"\"\" # file_importer = TrainingDataImporter.load_from_config( #", "nlu_data.items() if d.is_empty()] fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable fingerprint_comparison.nlu = [l for", "because no valid domain file was found. Only an nlu-model was created.\" \"Please", ") return old_model async def _do_training( file_importer: TrainingDataImporter, output_path: Text, train_path: Text, fingerprint_comparison_result:", "train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) async def _train_core_with_validated_data( file_importer: TrainingDataImporter, output: Text, train_path: Optional[Text]", "Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool = False, ) ->", "fixed_model_name: Name of model to be stored. additional_arguments: Additional training parameters. Returns: Path", "train_path=train_path, fixed_model_name=fixed_model_name, additional_arguments=additional_arguments, ) elif fingerprint_comparison_result.should_retrain_nlg(): print_color( \"Core stories/configuration did not change. \"", "'--domain' argument or check if the provided domain file exists.\" ) return None", "model (Core and NLU). Use only from `train_async`. 
Args: file_importer: `TrainingDataImporter` which supplies", "\" \"a Rasa NLU model using the '--nlu' argument.\" ) return # training", "# additional_arguments=additional_arguments, # ) new_fingerprint = await model.model_fingerprint(file_importer) old_model = model.get_latest_model(output_path) fingerprint_comparison =", "domain: Text, config: Text, training_files: Union[Text, List[Text]], output: Text = DEFAULT_MODELS_PATH, force_training: bool", "otherwise in the provided directory. fixed_model_name: Name of the model to be stored.", "even if data has not changed. fixed_model_name: Name of model to be stored.", "FingerprintComparisonResult from rasa.core.domain import Domain from rasa.utils.common import TempDirectoryPath from rasa.cli.utils import (", "train_async( domain: Union[Domain, Text], config: Dict[Text, Text], training_files: Optional[Union[Text, List[Text]]], output_path: Text =", "None, persist_nlu_training_data: bool = False, retrain_nlu: Union[bool, List[Text]] = True ) -> Optional[Text]:", "file_importer.get_domain() core_untrainable = domain.is_empty() or stories.is_empty() nlu_untrainable = [l for l, d in", "= False, additional_arguments: Optional[Dict] = None, loop: Optional[asyncio.AbstractEventLoop] = None, ) -> Optional[Text]:", "Text, train_path: Text, fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool", "training completed.\", color=bcolors.OKBLUE) if train_path is None: # Only Core was trained. new_fingerprint", "file_importer.get_domain() if domain.is_empty(): print_error( \"Core training was skipped because no valid domain file", "Text, output: Text, train_path: Optional[Text] = None, fixed_model_name: Optional[Text] = None, persist_nlu_training_data: bool" ]
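The overlapping fragments in the list above appear to window a Botfront fork of Rasa's rasa/train.py. For reference only, here is a de-duplicated sketch of the one piece that is fully visible in those fragments, the synchronous train() wrapper; this is a reconstruction, not the authoritative source. The DEFAULT_MODELS_PATH value and the standalone-module layout are assumptions, and train_async (defined later in that same source file) is referenced but not reproduced.

import asyncio
from typing import Dict, List, Optional, Text, Union

# Assumed value; imported from rasa.constants in the original module.
DEFAULT_MODELS_PATH = "models"


def train(
    domain: Text,
    config: Text,
    training_files: Union[Text, List[Text]],
    output: Text = DEFAULT_MODELS_PATH,
    force_training: bool = False,
    fixed_model_name: Optional[Text] = None,
    persist_nlu_training_data: bool = False,
    additional_arguments: Optional[Dict] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Optional[Text]:
    # Reuse an existing event loop when possible, otherwise create one,
    # then delegate to the async implementation defined in the same file.
    if loop is None:
        try:
            loop = asyncio.get_event_loop()
        except RuntimeError:
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
    return loop.run_until_complete(
        train_async(  # defined later in the original source file; not reproduced here
            domain=domain,
            config=config,
            training_files=training_files,
            output_path=output,
            force_training=force_training,
            fixed_model_name=fixed_model_name,
            persist_nlu_training_data=persist_nlu_training_data,
            additional_arguments=additional_arguments,
        )
    )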
[ "for i in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if", "def finalPrices(self, prices: List[int]) -> List[int]: res = [] for i in range(len(prices)):", "i in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1:", "in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1: res.append(prices[i])", "range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1: res.append(prices[i]) res.append(prices[-1])", "res = [] for i in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]:", "prices: List[int]) -> List[int]: res = [] for i in range(len(prices)): for j", "-> List[int]: res = [] for i in range(len(prices)): for j in range(i+1,len(prices)):", "List[int]: res = [] for i in range(len(prices)): for j in range(i+1,len(prices)): if", "= [] for i in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j])", "finalPrices(self, prices: List[int]) -> List[int]: res = [] for i in range(len(prices)): for", "Solution: def finalPrices(self, prices: List[int]) -> List[int]: res = [] for i in", "for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1: res.append(prices[i]) res.append(prices[-1]) return", "List[int]) -> List[int]: res = [] for i in range(len(prices)): for j in", "j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break if j==len(prices)-1: res.append(prices[i]) res.append(prices[-1]) return res", "[] for i in range(len(prices)): for j in range(i+1,len(prices)): if prices[j]<=prices[i]: res.append(prices[i]-prices[j]) break", "class Solution: def finalPrices(self, prices: List[int]) -> List[int]: res = [] for i", "<gh_stars>1-10 class Solution: def finalPrices(self, prices: List[int]) -> List[int]: res = [] for" ]
[ "_move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor -= 1 def _move_to_first_line(self) ->", "source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding,", "self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id", "is_quit = False if not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if", "0 if wincol == 1 else row + winheight filter_col = self._context['wincol'] else:", "height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True,", "Re-open denite buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate", "-> None: self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line()", "if key in self._timers: return if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer',", "= not [x for x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if", "None: self._vim = vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] = [] self._candidates:", "or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\"", "self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() ==", "self._denite.get_action( self._context, action_name, candidates) if not action: return post_action = self._context['post_action'] is_quit =", "[self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern =", "= max([ len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = (", "int) -> None: self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! 
zb')", "if self._context['error_messages']: inpt = '[ERROR] ' + inpt path = '[' + self._context['path']", "+ linenr + \"}%*\") else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', (", "for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line()", "if self._cursor >= 1: self._cursor -= 1 def _move_to_first_line(self) -> None: self._cursor =", "self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal", "return {} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates:", "return not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric():", "self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None: if", "if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has',", "int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+',", "= [] self._statusline_sources = '' self._titlestring = '' self._ruler = False self._prev_action =", "width, 'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr',", "init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW':", "else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')}", "r'\\\\\\1', self._context['input'].replace(' ', '') )) self._matched_char_id = self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1,", "self._stop_timer('update_buffer') def _update_status(self) -> None: inpt = '' if self._context['input']: inpt = self._context['input']", "[]) def _init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr =", "def _init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight =", "self._vim.call('has', 'nvim'): # In Vim8, FileType autocmd is not fired after set filetype", "self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler']", "winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) >", "not fired after set filetype option. 
self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd", "self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid) # Restore the window if", "-> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange", "if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler']", "len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor", "range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status()", "window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use setlocal instead of \"current.window.options\"", "'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status", "self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status", "self._updated def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']:", "self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None: if self._vim.call('winnr', '$')", "self._winheight: self._winheight = candidates_len max_source_name_len = 0 if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name']))", "None self._selected_candidates: typing.List[int] = [] self._candidates: Candidates = [] self._cursor = 0 self._entire_len", "= {} self._sources_history: typing.List[typing.Any] = [] self._previous_text = '' self._floating = False self._filter_floating", "return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if (self._prev_bufnr", "'width': width, 'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win',", "= ( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" + \"%{\" +", "'SW' row = 0 self._context['filter_winrow'] = row + opened_pos else: anchor = 'NW'", "self._context['filter_winrow'] = row + opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow']", "Line\\]$', self._vim.current.buffer.name): # Ignore command line window. 
return resume = self._initialized and context['resume']", "self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close filter window before", "!= prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg',", "False options['buflisted'] = False options['modeline'] = False options['modifiable'] = False options['filetype'] = 'denite'", "buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and", "prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False self._stop_timer('update_buffer') def _update_status(self) -> None: inpt", "self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split'] ==", "0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical' and not self._floating: # Move", "'aboveleft' if is_fit else 'topleft' else: direction = 'belowright' if is_fit else 'botright'", "+ opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow'] = row +", "empty sources. error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option():", "' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts or", "if self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] =", "self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' +", "self._context['auto_resize']: height = max(self._winheight, 1) width = max(self._winwidth, 1) else: width = self._context['winwidth']", "None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer is already closed", "in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1 if self._matched_pattern != '': self._matched_range_id", "short_name if source_names == 'short' else name return source_name def _get_candidate_display_text(self, index: int)", "self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for x in self._vim.call('getmatches',", "self._winid) self._matched_char_id = -1 if self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange',", "1 def _move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor", "self._do_immediately() return True return not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) ->", "self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight =", "self._vim.call('win_gotoid', self._winid) if not is_vertical and self._vim.current.window.height != winheight: if self._floating: wincol =", "== 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + 
winheight filter_winid =", "source_name = short_name if source_names == 'short' else name return source_name def _get_candidate_display_text(self,", "self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts =", "'start_filter', 'quick_move') for key in update: self._context[key] = context[key] self._check_move_option() if self._check_do_option(): return", "'' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None:", "is_current_buffer: if (self._context['auto_action'] and prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False self._stop_timer('update_buffer') def", "= [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid =", "= False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text", "-= 1 def _move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self) -> None:", "= (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight, 1)", "cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit = False if not is_quit and", "-> bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif (self._candidates and self._context['immediately']", "def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self)", "= self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor)", "self._prev_wininfo: # Note: execute restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim)", "for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) ->", "'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path']", "and not self._floating: # Move the window to bottom self._vim.command('wincmd J') self._winrestcmd =", "denite.util import echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim, UserContext, Candidates, Candidate", "self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable", "self._vim.command( 'silent keepalt %s %s %s %s' % ( self._get_direction(), vertical, command, bufnr,", "v else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid',", "is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if 
is_current_buffer: if (self._context['auto_action'] and", "str: direction = str(self._context['direction']) if direction == 'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts()", "= self._context['winwidth'] height = self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']:", "name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if source_names == 'short'", "False self._stop_timer('update_buffer') def _update_status(self) -> None: inpt = '' if self._context['input']: inpt =", "return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear() self._context.update(context)", "self._context['reversed']: self._vim.command('normal! zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' +", "from denite.parent import SyncParent class Default(object): @property def is_async(self) -> bool: return self._is_async", "{} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return", "'vertical' else '' command = ( 'buffer' if split in ['no', 'tab', 'floating',", "= True self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates", "'prev_winid', 'start_filter', 'quick_move') for key in update: self._context[key] = context[key] self._check_move_option() if self._check_do_option():", "= row + opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow'] =", "= [self._get_cursor_candidate()] else: candidates = [] if not self._denite or not candidates or", "self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal", "max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] =", "False self._filter_floating = False self._updated = False self._timers: typing.Dict[str, int] = {} self._matched_range_id", "if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer:", "split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if self._vim.current.buffer.options['filetype'] !=", "if not self._denite: # if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else:", "self._winminheight = -1 self._is_multi = False self._is_async = False self._matched_pattern = '' self._displayed_texts:", "inpt = self._context['input'] + ' ' if self._context['error_messages']: inpt = '[ERROR] ' +", "_stop_timer(self, key: str) -> None: if key not in self._timers: return self._vim.call('timer_stop', self._timers[key])", "is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] =", "False if not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and", "'#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', 
'#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has', 'nvim'): #", "-> None: goto = self._winid > 0 and self._vim.call( 'win_gotoid', self._winid) if goto:", "self._cursor = 0 self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context: UserContext =", "def _stop_timer(self, key: str) -> None: if key not in self._timers: return self._vim.call('timer_stop',", "'' return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating =", "range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] ==", "bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight! link CursorLine CursorLine') if self._floating", "self._context['is_redraw'] = is_force if is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] =", "vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] = [] self._candidates: Candidates = []", "1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for", "in # neovim. self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable')", "-> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') #", "%s %s %s' % ( self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call(", "-> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer", "bufname) def _get_direction(self) -> str: direction = str(self._context['direction']) if direction == 'dynamictop' or", "self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" + \"%{\"", "self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) ->", "is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id']", "'wrap', } for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to", "!= context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] = False self._is_multi = len(sources)", "Move to the previous window self._vim.command('wincmd p') def _do_command(self, command: str) -> None:", "= '' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str,", "-> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None:", "row -= self._winheight 
self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'], 'width':", "command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr:", "pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self) -> None:", "key in self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None: # Use floating", "self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] = 'delete'", "= self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win':", "sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent! autocmd! denite')", "== 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key] =", "= '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2])", "0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if", "else row + winheight filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config',", "if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')}", "}) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1)", "= self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def", "_update_status(self) -> None: inpt = '' if self._context['input']: inpt = self._context['input'] + '", "for x in self._selected_candidates] def _init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized", "# Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] = 'delete' options['swapfile']", "updated: self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', self._context['input']) return", "def _move_to_last_line(self) -> None: self._cursor = len(self._candidates) def _start_timer(self, key: str) -> None:", "split != 'no': command = self._get_direction() command += ' vsplit' if split ==", "self._timers[key]) # Note: After timer_stop is called, self._timers may be removed if key", "def _close_current_window(self) -> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!')", "' split' bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd',", "= self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates]", "not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: 
self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action']", "str: source_names = self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name =", "not self._vim.call('has', 'nvim'): # In Vim8, FileType autocmd is not fired after set", "= self._ruler def _close_current_window(self) -> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #')", "self._context['sources'] = sources self._context['is_redraw'] = False self._is_multi = len(sources) > 1 if not", "= self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources", "[prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action'] and prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated =", "or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates", "row + winheight filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid,", "Restore the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: #", "else 'topleft' else: direction = 'belowright' if is_fit else 'botright' return direction def", "typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim, 'run_coroutine'): #", "% ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained') %", "'' self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True) -> None: self._context['is_redraw'] =", "= { 'input': inpt, 'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total':", "self._vim.current.window.options[k] = v else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab':", "_init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight']", "1) winwidth = max(self._winwidth, 1) is_vertical = split == 'vertical' if not is_current_buffer:", "or split == 'tab' or self._vim.call('winnr', '$') == 1): return winheight = max(self._winheight,", "0 self._winminheight = -1 self._is_multi = False self._is_async = False self._matched_pattern = ''", "self._vim.command('highlight! link CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] =", "'row': filter_row, 'col': filter_col, }) self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal! 
zb')", "prev_statusline_sources = self._statusline_sources self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated", "+ \"%{\" + linenr + \"}%*\") else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr,", "10, -1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace(' ', '') ))", "self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer", "None: self._cursor = len(self._candidates) def _start_timer(self, key: str) -> None: if key in", "= [] if not self._denite or not candidates or not action_name: return self._prev_action", "self._context != context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] = False self._is_multi =", "[] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line()", "self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer',", "str) -> None: if key not in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note:", "link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default", "in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) + '}')", "= -1 self._winid = -1 self._winrestcmd = '' self._initialized = False self._winheight =", "cursor = 1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self)", "elif key == 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str)", "self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self) -> None: self._context['input'] =", "'col': self._context['wincol'], 'width': winwidth, 'height': winheight, }) filter_row = 0 if wincol ==", "str) -> None: if key in self._timers: return if key == 'update_candidates': self._timers[key]", "1) if self._context['auto_resize']: height = max(self._winheight, 1) width = max(self._winwidth, 1) else: width", "def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight", "self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = [] if not self._denite", "self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number ==", "'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'),", "-> None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() 
self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self,", "self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] = 'delete' options['swapfile'] = False options['buflisted']", "_update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']:", "buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates] buffer[:] =", "%s %s %s %s' % ( self._get_direction(), vertical, command, bufnr, ) ) else:", "== 'tab' or self._vim.call('winnr', '$') == 1): return winheight = max(self._winheight, 1) winwidth", "resize ' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool:", "resume: # Skip the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move')", "self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative':", "self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row, 'col': filter_col,", "'', is_manual: bool = False) -> None: if is_manual: candidates = self._get_selected_candidates() elif", "context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim, 'run_coroutine'): # self._denite", "hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else: self._denite = SyncParent(self._vim) self._result =", "> 0 and self._vim.call( 'win_gotoid', self._winid) if goto: # Jump to denite window", "self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear()", "False options['modeline'] = False options['modifiable'] = False options['filetype'] = 'denite' if self._vim.call('exists', '#WinEnter'):", "cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor", "twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return", "= candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates", "to the previous window self._vim.command('wincmd p') def _do_command(self, command: str) -> None: self._init_cursor()", "def _get_display_source_name(self, name: str) -> str: source_names = self._context['source_names'] if not self._is_multi or", "not self._floating: # Move the window to bottom self._vim.command('wincmd J') self._winrestcmd = ''", "+ \"}%*\") else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \"", "value instead of local in # neovim. 
self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv')", "# Re-open denite buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if", "= self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] =", "< len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) -> None: if self._cursor >= 1:", "candidates = [] if not self._denite or not candidates or not action_name: return", "Default(object): @property def is_async(self) -> bool: return self._is_async def __init__(self, vim: Nvim) ->", "candidates) self._result = candidates if command != '': self._vim.command(command) if is_quit and post_action", "self._selected_candidates: typing.List[int] = [] self._candidates: Candidates = [] self._cursor = 0 self._entire_len =", "-1 self._winrestcmd = '' self._initialized = False self._winheight = 0 self._winwidth = 0", "None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) ->", "= self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name = '' else:", "self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None: if not context['sources_queue']: return self._sources_history.append({", "None: if self._cursor < len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) -> None: if", "After timer_stop is called, self._timers may be removed if key in self._timers: self._timers.pop(key)", "or len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or", "key not in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called,", "self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical = 'vertical' if split == 'vertical'", "Nvim, UserContext, Candidates, Candidate from denite.parent import SyncParent class Default(object): @property def is_async(self)", "Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if not self._candidates or", "self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo", "' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link", "# self._denite = ASyncParent(self._vim) # else: self._denite = SyncParent(self._vim) self._result = [] context['sources_queue']", "_start_sources_queue(self, context: UserContext) -> None: if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path':", "default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine Statement')", "-1 and candidates_len < winminheight): self._winheight = winminheight elif candidates_len > max_height: self._winheight", "'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not", "= init_pos['col'] if init_pos['anchor'] == 'NW': 
winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0]", "if updated: self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', self._context['input'])", "i in range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer = self._bufnr ==", "-> None: if is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else:", "'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split", "self._winheight = winminheight elif candidates_len > max_height: self._winheight = max_height elif candidates_len !=", "filter window before preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed", "winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical = split == 'vertical'", "if self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr =", "def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) ->", "foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap')", "self._filter_floating = False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit'", "'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent", "elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1) if", "bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) ->", "if is_current_buffer: if (self._context['auto_action'] and prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False self._stop_timer('update_buffer')", "x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id =", "+ self._context['path'] + ']' status = { 'input': inpt, 'sources': self._statusline_sources, 'path': path,", "False self._winheight = 0 self._winwidth = 0 self._winminheight = -1 self._is_multi = False", "self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status = status linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']:", "_ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif", "= is_force if is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False", "+ 'deniteStatusLineNumber LineNR') 
self._vim.command('highlight default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight']", "] self._filter_floating = False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command =", "> 0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical' and not self._floating: #", "if goto: # Jump to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if", "vim: Nvim) -> None: self._vim = vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int]", "== self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit =", "None: self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self)", "True return not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) -> None: if", "self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'], 'width': winwidth, 'height': winheight, })", "and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = ''", "self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor", "else name return source_name def _get_candidate_display_text(self, index: int) -> str: source_names = self._context['source_names']", "+ \"}%*\")) def _get_display_source_name(self, name: str) -> str: source_names = self._context['source_names'] if not", "may be removed if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str) ->", "= self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite']", "matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1 if self._matched_pattern != '': self._matched_range_id =", "and self._context['input']: self._vim.call('setreg', '/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None: candidates_len =", "= self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates']", "g<EMAIL>> # License: MIT license # ============================================================================ import re import typing from denite.util", "self._update_status() self._context['is_redraw'] = False def quit(self) -> None: if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result", "self._context['match_highlight']: matches = [x['id'] for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches:", "= {} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any]", "self._context['quick_move'] and do_map(self, 'quick_move', []): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self)", "if wincol == 1 else row + winheight filter_col = self._context['wincol'] 
else: init_pos", "else: self._update_status() self._context['is_redraw'] = False def quit(self) -> None: if self._denite: self._denite.on_close(self._context) self._quit_buffer()", "not [x for x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction", "prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/',", "and self._vim.call( 'win_gotoid', self._winid) if goto: # Jump to denite window self._init_buffer() self.do_action('default')", "self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any], context: UserContext)", "options['buflisted'] = False options['modeline'] = False options['modifiable'] = False options['filetype'] = 'denite' if", "'vertical' if split == 'vertical' else '' command = ( 'buffer' if split", "else ' ') + ' '.join(terms).replace('\\n', '') def _get_max_height(self) -> int: return int(self._vim.options['lines'])", "changes global value instead of local in # neovim. self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3')", "False self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring =", "= self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None: if self._vim.call('winnr', '$') ==", "self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates): self._cursor += 1", "prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word']", "self._check_move_option() if self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context !=", "self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers']", "-> None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor = len(self._candidates) def", "Candidate: if not self._candidates or pos > len(self._candidates): return {} return self._candidates[pos -", "J') self._winrestcmd = '' return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window',", "'height': winheight, }) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position',", "-1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace(' ', '') )) self._matched_char_id", "if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def", "# Skip the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for", "deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default", "in self._selected_candidates else ' ') + ' '.join(terms).replace('\\n', '') def _get_max_height(self) -> int:", "if self._candidates: max_source_name_len = max([ 
len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len", "def _update_status(self) -> None: inpt = '' if self._context['input']: inpt = self._context['input'] +", "self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline']", "'.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [", "self._cursor = pos if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if self._cursor", "self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor =", "'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position", "# Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in", "inpt = '' if self._context['input']: inpt = self._context['input'] + ' ' if self._context['error_messages']:", "self._floating = False self._filter_floating = False self._updated = False self._timers: typing.Dict[str, int] =", "-> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim)", "= self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr',", "None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer '", "self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern =", "if self._get_cursor_candidate() else [] return [self._candidates[x] for x in self._selected_candidates] def _init_denite(self) ->", "== 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'], 'col':", "( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self) -> bool: if not self._denite:", "'delete' options['swapfile'] = False options['buflisted'] = False options['modeline'] = False options['modifiable'] = False", "Close filter window before preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear", "context['resume'] if resume: # Skip the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid',", "else 'botright' return direction def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'),", "source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else", "<buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split'] if", "direction = str(self._context['direction']) if direction == 'dynamictop' 
or direction == 'dynamicbottom': self._update_displayed_texts() winwidth", "'edit' if split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating':", "self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if not self._candidates or pos >", "None: split = self._context['split'] if (split != 'no' and self._winid > 0 and", "height = self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']: anchor =", "self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {}", "or pos > len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self) ->", "return True return not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) -> None:", "# License: MIT license # ============================================================================ import re import typing from denite.util import", "== 'vertical' if not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical", "self._context['winheight'] row -= self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'],", "self._previous_text = '' self._floating = False self._filter_floating = False self._updated = False self._timers:", "self._move_to_pos(prev_cursor) # Disable quit flag is_quit = False if not is_quit and is_manual:", "candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height =", "self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) <", "self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden']", "for key in update: self._context[key] = context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer() if", "'' self._titlestring = '' self._ruler = False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any]", "= dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo =", "+ inpt path = '[' + self._context['path'] + ']' status = { 'input':", "( self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}',", "self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self)", "and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid',", "@property def is_async(self) -> bool: return self._is_async def __init__(self, vim: Nvim) -> None:", "and prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False self._stop_timer('update_buffer') def _update_status(self) -> None:", "def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if", "self._is_async 
= False self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = ''", "self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self) -> None: self._context['input'] = ''", "self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete =", "if self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) ->", "self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates] buffer[:]", "instead of \"current.window.options\" # \"current.window.options\" changes global value instead of local in #", "goto = self._winid > 0 and self._vim.call( 'win_gotoid', self._winid) if goto: # Jump", "self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars", "-> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if", "-> None: from denite.ui.map import do_map self._vim.command('silent! autocmd! denite') if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name):", "if self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context != context:", "self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close", "AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License: MIT license # ============================================================================ import re", "action_name: return self._prev_action = action_name action = self._denite.get_action( self._context, action_name, candidates) if not", "self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height':", "# Clear previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent", "name: str) -> str: source_names = self._context['source_names'] if not self._is_multi or source_names ==", "0: # Denite buffer is already closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if", "bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif (self._candidates and self._context['immediately'] or", "pos: int) -> Candidate: if not self._candidates or pos > len(self._candidates): return {}", "== 'no' or split == 'tab' or self._vim.call('winnr', '$') == 1): return winheight", "self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos:", "command: str) -> None: self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default',", "'%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'],", "[ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if 
self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring", "1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr)", "== 1 and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async or", "split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] == 'floating': self._filter_floating =", "self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def quit(self) -> None:", "else: self._move_to_first_line() def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor =", "self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self)", "(self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric(): self._cursor =", "candidates) if not action: return post_action = self._context['post_action'] is_quit = action['is_quit'] or post_action", "1 and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async or self._candidates)", "_quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer is", "= ( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer')", "'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo()", "Use floating window if split == 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, {", "= max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts = [", "if goto: # Move to the previous window self._vim.command('wincmd p') def _do_command(self, command:", "!= winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore) def", "self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the", "= '' self._initialized = False self._winheight = 0 self._winwidth = 0 self._winminheight =", "= { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options =", "= self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for", "winwidth = self._vim.call('winwidth', 0) is_fit = not [x for x in self._displayed_texts if", "= self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if", "error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return 
self._init_buffer()", "candidates_len max_source_name_len = 0 if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in", "self._updated = False self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1 self._matched_char_id =", "True self._vim.vars['denite#_candidates'] = [ x['word'] for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable']", "prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): #", "if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos,", "if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else: self._denite = SyncParent(self._vim) self._result", "elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line()", "_restart(self) -> None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def", "candidates if command != '': self._vim.command(command) if is_quit and post_action == 'open': #", "!= 'no': command = self._get_direction() command += ' vsplit' if split == 'vertical'", "in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos']", "\"}%*\") else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" +", "self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd", "conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber')", "if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if", "context['path'] = self._context['path'] def _start(self, sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map", "doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")')", "'.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts or self._matched_pattern", "'col': 0, 'width': width, 'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window':", "' + inpt path = '[' + self._context['path'] + ']' status = {", "self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" +", "split == 'floating': if self._context['auto_resize'] 
and row > 1: row += self._context['winheight'] row", "self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None: # Use floating window if", "= 0 self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context: UserContext = {}", "if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id in", "self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore", "# AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License: MIT license # ============================================================================ import", "self._timers: return if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key", "not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if self._context['do'] != '': self._do_command(self._context['do'])", "resume = self._initialized and context['resume'] if resume: # Skip the initialization update =", "'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', }", "not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input']", "self._displayed_texts = [ self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def _update_buffer(self) ->", "in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) -> None: goto", "self._winid > 0 and self._vim.call( 'win_gotoid', self._winid) if goto: # Jump to denite", "+ str(self._context['max_source_name_len']) + '}') self._displayed_texts = [ self._get_candidate_display_text(i) for i in range(0, candidates_len)", "if not self._denite or not candidates or not action_name: return self._prev_action = action_name", "typing.List[typing.Any] = [] self._previous_text = '' self._floating = False self._filter_floating = False self._updated", "is_changed: if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not", "source_names = self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name = ''", "self._bufnr) < 0: # Denite buffer is already closed return winids = self._vim.call('win_findbuf',", "== 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if", "after set filetype option. self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite '", "-> None: self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! 
zb') def", "self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and", "self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources =", "+ self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight, 1) width = max(self._winwidth,", "return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor)", "else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources", "zb') def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates): self._cursor += 1 def", ") ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) -> str:", "keepalt {command}', bufname) def _get_direction(self) -> str: direction = str(self._context['direction']) if direction ==", "if split == 'vertical' else ' split' bufname = '[denite]-' + self._context['buffer_name'] if", "= self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']: anchor = 'SW'", "typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any] = [] self._previous_text = '' self._floating =", "else '' command = ( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window',", "Jump to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return", "= '' self._ruler = False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {}", "self._floating: # Move the window to bottom self._vim.command('wincmd J') self._winrestcmd = '' return", "-> str: source_names = self._context['source_names'] candidate = self._candidates[index] terms = [] if self._is_multi", "self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler'] = self._ruler def _close_current_window(self) -> None:", "matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1', self._context['input'].replace(' ', '') )) self._matched_char_id = self._vim.call( 'matchadd',", "split: str) -> None: # Use floating window if split == 'floating': self._vim.call(", "= 'vertical' if split == 'vertical' else '' command = ( 'buffer' if", "self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] = False self._is_multi = len(sources) > 1 if", "self.redraw() self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw']", "self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto')", "self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: 
self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self,", "[]): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status =", "'Empty sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts()", "def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates): self._cursor += 1 def _move_to_prev_line(self)", "pos if self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates):", "= 0 if wincol == 1 else row + winheight filter_col = self._context['wincol']", "def _get_candidate_display_text(self, index: int) -> str: source_names = self._context['source_names'] candidate = self._candidates[index] terms", "> 1 if not sources: # Ignore empty sources. error(self._vim, 'Empty sources') return", "p') def _do_command(self, command: str) -> None: self._init_cursor() cursor = 1 while cursor", "goto: # Jump to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not", "= 1 def _move_to_last_line(self) -> None: self._cursor = len(self._candidates) def _start_timer(self, key: str)", "'' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True) ->", "'' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name", "self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer()", "'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self,", "'[ERROR] ' + inpt path = '[' + self._context['path'] + ']' status =", "self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys():", "'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight',", "False options['filetype'] = 'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd", "if not self._is_multi or source_names == 'hide': source_name = '' else: short_name =", "self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] = {} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options", "if not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if self._context['do'] != '':", "+ str(winheight)) if self._context['reversed']: self._vim.command('normal! 
zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical", "self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move", "init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col = init_pos['col']", "is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = []", "-> str: source_names = self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name", "if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit' if split ==", "self._denite: return False [self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts", "str(self._context['direction']) if direction == 'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth',", "self._vim.call('winnr', '$') == 1): return winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1)", "'#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has', 'nvim'): # In Vim8, FileType autocmd is", "preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for bufnr", "if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) #", "self._vim.call('setreg', '/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if", "str: source_names = self._context['source_names'] candidate = self._candidates[index] terms = [] if self._is_multi and", "and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice to restore layout properly", "self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self) ->", "<NAME> <<EMAIL> at g<EMAIL>> # License: MIT license # ============================================================================ import re import", "+ str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if self._context['do']", "winwidth, 'height': winheight, }) filter_row = 0 if wincol == 1 else row", "if self._context['auto_resize'] and row > 1: row += self._context['winheight'] row -= self._winheight self._vim.call('nvim_win_set_config',", "instead of local in # neovim. 
self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal", "<<EMAIL> at g<EMAIL>> # License: MIT license # ============================================================================ import re import typing", "split' bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname)", "-> None: split = self._context['split'] if (split == 'no' or split == 'tab'", "split == 'vertical' else ' split' bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists',", "'dynamictop': direction = 'aboveleft' if is_fit else 'topleft' else: direction = 'belowright' if", "< len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if", "bool) -> None: split = self._context['split'] if (split == 'no' or split ==", "{} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any] =", "= self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical and self._vim.current.window.height != winheight: if self._floating:", "None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches =", "direction == 'dynamictop': direction = 'aboveleft' if is_fit else 'topleft' else: direction =", "in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False if self._vim.current.buffer.options['filetype'] != 'denite':", "winminheight): self._winheight = winminheight elif candidates_len > max_height: self._winheight = max_height elif candidates_len", "self._prev_action = action_name action = self._denite.get_action( self._context, action_name, candidates) if not action: return", "window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}]", "str) -> None: self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default', command)", "self._winrestcmd = '' return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ]", "if status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status = status linenr =", "previous window self._vim.command('wincmd p') def _do_command(self, command: str) -> None: self._init_cursor() cursor =", "opened_pos + height + 3 > self._vim.options['lines']: anchor = 'SW' row = 0", "and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1", "winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow']", "self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor = len(self._candidates) def _start_timer(self, key:", "FileType autocmd is not fired after set filetype option. 
self._vim.command('silent doautocmd FileType denite')", "row + opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow'] = row", "before preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for", "'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % (", "= self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd =", "!= 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding,", "-> int: return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow']) -", "== self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close filter window before preview", "self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer is already closed return winids =", "nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes')", "self._denite or not candidates or not action_name: return self._prev_action = action_name action =", "else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row':", "self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal", "in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop': direction =", "import echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim, UserContext, Candidates, Candidate from", "self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close filter", "/^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' +", "dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo()", "not self._denite or not candidates or not action_name: return self._prev_action = action_name action", "# Ignore command line window. 
return resume = self._initialized and context['resume'] if resume:", "sources self._context['is_redraw'] = False self._is_multi = len(sources) > 1 if not sources: #", "'&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" +", "self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars =", "+ \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" + \"%{\" + linenr + \"}%*\") else:", "self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor',", "license # ============================================================================ import re import typing from denite.util import echo, error, clearmatch,", "terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else ' ') + ' '.join(terms).replace('\\n',", "= self._initialized and context['resume'] if resume: # Skip the initialization update = ('immediately',", "= 'belowright' if is_fit else 'botright' return direction def _get_wininfo(self) -> typing.List[typing.Any]: return", "self._updated and is_changed: if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor)", "if not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if", "self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth =", "'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str) -> None: if key not in self._timers:", "self._prev_status = status linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring']", "_do_command(self, command: str) -> None: self._init_cursor() cursor = 1 while cursor < len(self._candidates):", "= self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd') self._ruler =", "if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt", "= '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True) -> None: self._context['is_redraw']", "the previous window self._vim.command('wincmd p') def _do_command(self, command: str) -> None: self._init_cursor() cursor", "self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) ->", "x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer)", "self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, 
len(self._candidates), candidate.get('abbr', candidate['word'])))", "self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine", "candidates_len) ] def _update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if", "self._vim.command('wincmd J') self._winrestcmd = '' return self._floating = split in [ 'floating', 'floating_relative_cursor',", "\"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr +", "if is_fit else 'topleft' else: direction = 'belowright' if is_fit else 'botright' return", "( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr", "self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = {", "filetype option. self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer>", "else: self._denite = SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result", "'/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not", "( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained') % (", "statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern", "x in self._selected_candidates] def _init_denite(self) -> None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized =", "denite ' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split", "and do_map(self, 'quick_move', []): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) ->", "# Move the window to bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating", "LineNR') self._vim.command('highlight default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = (", "not self._denite: return False [self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts =", "_start(self, sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent! 
autocmd!", "return if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status = dict()", "= self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True)", "self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid)", "= [] return def _restart(self) -> None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates()", "+ self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon']))", "self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr,", "self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr',", "== ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax", "self._context['input']: self._vim.call('setreg', '/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates)", "'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match", "self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True) -> None: self._context['is_redraw'] = is_force", "None: if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight'] self._winwidth =", "default link ' + 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' +", "}) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row", "self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link", "-1 if self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern),", "= [ x['word'] for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False", "return direction def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ]", "self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight = self._vim.current.window.height self._winwidth = self._vim.current.window.width", "if self._context['reversed']: self._vim.command('normal! 
zb') def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates): self._cursor", "autocmd is not fired after set filetype option. self._vim.command('silent doautocmd FileType denite') if", "1: row += self._context['winheight'] row -= self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row':", "'no' or split == 'tab' or self._vim.call('winnr', '$') == 1): return winheight =", "UserContext) -> None: if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], })", "filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row, 'col': filter_col, })", "' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split =", "self._selected_candidates: return [self._get_cursor_candidate() ] if self._get_cursor_candidate() else [] return [self._candidates[x] for x in", "self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern", "self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources = '", "= (self._displayed_texts != prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if", "ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight", "signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler", "'relative': 'editor', 'row': row, 'col': self._context['wincol'], 'width': winwidth, 'height': winheight, }) filter_row =", "None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context:", "self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed =", "'NW' row = 1 self._context['filter_winrow'] = row + height + opened_pos self._vim.call( 'nvim_open_win',", "self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for k,", "============================================================================ # FILE: default.py # AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License: MIT", "and is_changed: if not is_current_buffer: save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if", "+ opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col':", "init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'],", "0 if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] 
=", "deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange ' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' +", "candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: #", "'') def _get_max_height(self) -> int: return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines'])", "] def _update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete", "_close_current_window(self) -> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def", "self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool", "denite.util import Nvim, UserContext, Candidates, Candidate from denite.parent import SyncParent class Default(object): @property", "self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def quit(self) -> None: if self._denite: self._denite.on_close(self._context)", "+ ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + '", "self._winwidth = 0 self._winminheight = -1 self._is_multi = False self._is_async = False self._matched_pattern", "= -1 if self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' +", "'' if self._context['input']: inpt = self._context['input'] + ' ' if self._context['error_messages']: inpt =", "\"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" + \"%{\" + linenr +", "= self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str) -> None: if key not", "= self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, {", "'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col': 0, 'width': width,", "0, 'width': width, 'height': height, 'anchor': anchor, }) elif split == 'floating_relative_window': self._vim.call(", "if self._denite: self._denite.start(self._context) self._denite.on_init(self._context) self._initialized = True self._winheight = self._context['winheight'] self._winwidth = self._context['winwidth']", "context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path'] def", "re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): # Ignore command line window. 
return resume = self._initialized and", "self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format(", "3 > self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow'] = row +", "max(self._winwidth, 1) is_vertical = split == 'vertical' if not is_current_buffer: restore = self._vim.call('win_getid')", "def _get_direction(self) -> str: direction = str(self._context['direction']) if direction == 'dynamictop' or direction", "buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' +", "name return source_name def _get_candidate_display_text(self, index: int) -> str: source_names = self._context['source_names'] candidate", "' + str(winheight)) if self._context['reversed']: self._vim.command('normal! zb') elif is_vertical and self._vim.current.window.width != winwidth:", "!= '': self._matched_range_id = self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window':", "removed if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None: #", "None: self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer()", "= 'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter') if self._vim.call('exists', '#BufWinEnter'): self._vim.command('doautocmd BufWinEnter') if", "= '' return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating", "restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate:", "self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid) # Restore the", "None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link deniteMatchedRange '", "return [self._candidates[x] for x in self._selected_candidates] def _init_denite(self) -> None: if self._denite: self._denite.start(self._context)", "self._filter_floating = True elif split != 'no': command = self._get_direction() command += '", "elif (self._candidates and self._context['immediately'] or len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately() return True", "return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: # Move", "'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr", "self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] = False", "zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth)) if", "self._initialized = False self._winheight = 0 self._winwidth = 0 self._winminheight = -1 self._is_multi", "bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers']", 
"self._move_to_last_line() def _do_immediately(self) -> None: goto = self._winid > 0 and self._vim.call( 'win_gotoid',", "Candidates = [] self._cursor = 0 self._entire_len = 0 self._result: typing.List[typing.Any] = []", "self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') -", "FILE: default.py # AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License: MIT license #", "the window to bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating = split", "self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal! zb') elif is_vertical and self._vim.current.window.width !=", "if not self._candidates or pos > len(self._candidates): return {} return self._candidates[pos - 1]", "Move the window to bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating =", "filter_winid, { 'relative': 'editor', 'row': filter_row, 'col': filter_col, }) self._vim.command('resize ' + str(winheight))", "self._cursor = self._vim.call('line', '.') # Note: Close filter window before preview window self._vim.call('denite#filter#_close_filter_window')", "!= self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False self._stop_timer('update_buffer') def _update_status(self) -> None: inpt =", "-> None: inpt = '' if self._context['input']: inpt = self._context['input'] + ' '", "== self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for x in", "!= 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos) if", "self._winheight = candidates_len max_source_name_len = 0 if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for", "elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _", "and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force:", "nofoldenable') self._vim.command('setlocal foldcolumn=0') self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight')", "== self._prev_wininfo: # Note: execute restcmd twice to restore layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd)", "self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber", "def __init__(self, vim: Nvim) -> None: self._vim = vim self._denite: typing.Optional[SyncParent] = None", "+ 3 > self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow'] = row", "= {} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options = { 'colorcolumn', 'concealcursor',", "False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos: typing.List[typing.Any] = []", "= False self._filter_floating = False self._updated = False self._timers: typing.Dict[str, int] = {}", "if split == 'tab': self._vim.command('tabnew') elif 
# Reconstructed from the overlapping code fragments collected in this section:
# denite.nvim's buffer UI, class Default in denite/ui/default.py (MIT license).
# ============================================================================

import re
import typing

from denite.util import echo, error, clearmatch, regex_convert_py_vim
from denite.util import Nvim, UserContext, Candidates, Candidate
from denite.parent import SyncParent


class Default(object):

    @property
    def is_async(self) -> bool:
        return self._is_async

    def __init__(self, vim: Nvim) -> None:
        self._vim = vim
        self._denite: typing.Optional[SyncParent] = None
        self._selected_candidates: typing.List[int] = []
        self._candidates: Candidates = []
        self._cursor = 0
        self._result: typing.List[typing.Any] = []
        self._context: UserContext = {}
        self._bufnr = -1
        self._winid = -1
        self._winrestcmd = ''
        self._initialized = False
        self._winheight = 0
        self._winwidth = 0
        self._winminheight = -1
        self._is_multi = False
        self._is_async = False
        self._matched_pattern = ''
        self._displayed_texts: typing.List[str] = []
        self._statusline_sources = ''
        self._titlestring = ''
        self._ruler = False
        self._prev_action = ''
        self._prev_status: typing.Dict[str, typing.Any] = {}
        self._prev_curpos: typing.List[typing.Any] = []
        self._save_window_options: typing.Dict[str, typing.Any] = {}
        self._sources_history: typing.List[typing.Any] = []
        self._previous_text = ''
        self._floating = False
        self._filter_floating = False
        self._updated = False
        self._timers: typing.Dict[str, int] = {}
        self._matched_range_id = -1
        self._matched_char_id = -1
        self._check_matchdelete = bool(self._vim.call(
            'denite#util#check_matchdelete'))
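As context for the handle stored in self._vim above, here is a minimal, standalone
pynvim sketch (not denite code): it embeds a headless Neovim child process and drives
it through the same call()/command()/current.buffer surface the class uses. The buffer
contents are made-up placeholders.

    # Minimal pynvim sketch: one Nvim handle, everything goes through call()/command().
    import pynvim

    vim = pynvim.attach('child', argv=['nvim', '--embed', '--headless'])
    vim.command('enew')                                     # scratch buffer
    vim.current.buffer[:] = ['candidate 1', 'candidate 2']  # placeholder lines
    print(vim.call('bufnr', '%'), vim.call('line', '$'))    # buffer number, line count
    vim.close()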
    def start(self, sources: typing.List[typing.Any],
              context: UserContext) -> typing.List[typing.Any]:
        if not self._denite:
            # if hasattr(self._vim, 'run_coroutine'):
            #     self._denite = ASyncParent(self._vim)
            # else:
            self._denite = SyncParent(self._vim)

        self._result = []
        context['sources_queue'] = [sources]
        self._start_sources_queue(context)

        return self._result

    def _start_sources_queue(self, context: UserContext) -> None:
        if not context['sources_queue']:
            return

        self._sources_history.append({
            'sources': context['sources_queue'][0],
            'path': context['path'],
        })

        self._start(context['sources_queue'][0], context)

        if context['sources_queue']:
            context['sources_queue'].pop(0)
            context['path'] = self._context['path']

    def _start(self, sources: typing.List[typing.Any],
               context: UserContext) -> None:
        from denite.ui.map import do_map

        self._vim.command('silent! autocmd! denite')

        if re.search(r'\[Command Line\]$', self._vim.current.buffer.name):
            # Ignore command line window.
            return

        resume = self._initialized and context['resume']
        if resume:
            # Skip the initialization
            update = ('immediately', 'immediately_1', 'cursor_pos',
                      'prev_winid', 'start_filter', 'quick_move')
            for key in update:
                self._context[key] = context[key]

            self._check_move_option()
            if self._check_do_option():
                return

            self._init_buffer()
            if context['refresh']:
                self.redraw()
            self._move_to_pos(self._cursor)
        else:
            if self._context != context:
                self._context.clear()
                self._context.update(context)
            self._context['sources'] = sources
            self._context['is_redraw'] = False

            self._is_multi = len(sources) > 1

            if not sources:
                # Ignore empty sources.
                error(self._vim, 'Empty sources')
                return

            self._init_denite()
            self._gather_candidates()
            self._update_candidates()

            self._init_cursor()
            self._check_move_option()
            if self._check_do_option():
                return

            self._init_buffer()
            self._update_displayed_texts()
            self._update_buffer()
            self._move_to_pos(self._cursor)

        if self._context['quick_move'] and do_map(self, 'quick_move', []):
            return

        if self._context['start_filter']:
            do_map(self, 'open_filter_buffer', [])
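A toy, pure-Python model of the queue handling in _start_sources_queue() above: the
head entry is processed first and only popped afterwards, so source sets queued while
one is running are picked up on the next pass. The names and data here are invented
for illustration.

    # Toy model of the pop-after-processing queue used above.
    def run_queue(sources_queue):
        history = []
        while sources_queue:
            sources = sources_queue[0]
            history.append(sources)     # mirrors self._sources_history.append(...)
            # ... gathering / rendering for `sources` would happen here ...
            if sources_queue:
                sources_queue.pop(0)
        return history

    print(run_queue([['file/rec'], ['buffer', 'register']]))
    # -> [['file/rec'], ['buffer', 'register']]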
    def redraw(self, is_force: bool = True) -> None:
        self._context['is_redraw'] = is_force
        if is_force:
            self._gather_candidates()
        if self._update_candidates():
            self._update_buffer()
        else:
            self._update_status()
        self._context['is_redraw'] = False

    def _update_candidates(self) -> bool:
        if not self._denite:
            return False

        [self._is_async, pattern, statuses, self._entire_len,
         self._candidates] = self._denite.filter_candidates(self._context)

        prev_displayed_texts = self._displayed_texts
        self._update_displayed_texts()

        prev_matched_pattern = self._matched_pattern
        self._matched_pattern = pattern

        prev_statusline_sources = self._statusline_sources
        self._statusline_sources = ' '.join(statuses)

        if self._is_async:
            self._start_timer('update_candidates')
        else:
            self._stop_timer('update_candidates')

        updated = (self._displayed_texts != prev_displayed_texts or
                   self._matched_pattern != prev_matched_pattern or
                   self._statusline_sources != prev_statusline_sources)
        if updated:
            self._updated = True
            self._start_timer('update_buffer')

        if self._context['search'] and self._context['input']:
            self._vim.call('setreg', '/', self._context['input'])

        return self._updated

    def _update_buffer(self) -> None:
        is_current_buffer = self._bufnr == self._vim.current.buffer.number

        self._update_status()

        if self._check_matchdelete and self._context['match_highlight']:
            matches = [x['id'] for x in
                       self._vim.call('getmatches', self._winid)]
            if self._matched_range_id in matches:
                self._vim.call('matchdelete',
                               self._matched_range_id, self._winid)
                self._matched_range_id = -1
            if self._matched_char_id in matches:
                self._vim.call('matchdelete',
                               self._matched_char_id, self._winid)
                self._matched_char_id = -1
            if self._matched_pattern != '':
                self._matched_range_id = self._vim.call(
                    'matchadd', 'deniteMatchedRange',
                    r'\c' + regex_convert_py_vim(self._matched_pattern),
                    10, -1, {'window': self._winid})
                matched_char_pattern = '[{}]'.format(re.sub(
                    r'([\[\]\\^-])', r'\\\1',
                    self._context['input'].replace(' ', '')
                ))
                self._matched_char_id = self._vim.call(
                    'matchadd', 'deniteMatchedChar',
                    matched_char_pattern,
                    10, -1, {'window': self._winid})

        prev_linenr = self._vim.call('line', '.')
        prev_candidate = self._get_cursor_candidate()

        buffer = self._vim.buffers[self._bufnr]
        buffer.options['modifiable'] = True
        self._vim.vars['denite#_candidates'] = [
            x['word'] for x in self._candidates]
        buffer[:] = self._displayed_texts
        buffer.options['modifiable'] = False
        self._previous_text = self._context['input']
        self._resize_buffer(is_current_buffer)

        is_changed = (self._context['reversed'] or
                      (is_current_buffer and
                       self._previous_text != self._context['input']))
        if self._updated and is_changed:
            if not is_current_buffer:
                save_winid = self._vim.call('win_getid')
                self._vim.call('win_gotoid', self._winid)
            self._init_cursor()
            self._move_to_pos(self._cursor)
            if not is_current_buffer:
                self._vim.call('win_gotoid', save_winid)
        elif is_current_buffer:
            self._vim.call('cursor', [prev_linenr, 0])

        if is_current_buffer:
            if (self._context['auto_action'] and
                    prev_candidate != self._get_cursor_candidate()):
                self.do_action(self._context['auto_action'])

        self._updated = False
        self._stop_timer('update_buffer')
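The match-highlight bookkeeping above boils down to matchadd()/matchdelete() calls with
an explicit priority and an auto-assigned id. A standalone pynvim sketch of that
pattern; 'Search' stands in for the denite highlight groups, and the pattern and buffer
lines are arbitrary.

    import pynvim

    vim = pynvim.attach('child', argv=['nvim', '--embed', '--headless'])
    vim.current.buffer[:] = ['foo bar', 'foobar baz']
    match_id = vim.call('matchadd', 'Search', r'\cfoo', 10, -1)  # priority 10, auto id
    print([m['id'] for m in vim.call('getmatches')])             # -> [match_id]
    vim.call('matchdelete', match_id)
    vim.close()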
    def _switch_buffer(self) -> None:
        split = self._context['split']
        if (split != 'no' and self._winid > 0 and
                self._vim.call('win_gotoid', self._winid)):
            if split != 'vertical' and not self._floating:
                # Move the window to bottom
                self._vim.command('wincmd J')
            self._winrestcmd = ''
            return

        self._floating = split in [
            'floating',
            'floating_relative_cursor',
            'floating_relative_window',
        ]
        self._filter_floating = False

        if self._vim.current.buffer.options['filetype'] != 'denite':
            self._titlestring = self._vim.options['titlestring']

        command = 'edit'
        if split == 'tab':
            self._vim.command('tabnew')
        elif self._floating:
            self._split_floating(split)
        elif self._context['filter_split_direction'] == 'floating':
            self._filter_floating = True
        elif split != 'no':
            command = self._get_direction()
            command += ' vsplit' if split == 'vertical' else ' split'

        bufname = '[denite]-' + self._context['buffer_name']
        if self._vim.call('exists', '*bufadd'):
            bufnr = self._vim.call('bufadd', bufname)
            vertical = 'vertical' if split == 'vertical' else ''
            command = ('buffer' if split in
                       ['no', 'tab', 'floating',
                        'floating_relative_window',
                        'floating_relative_cursor'] else 'sbuffer')
            self._vim.command(
                'silent keepalt %s %s %s %s' % (
                    self._get_direction(), vertical, command, bufnr))
        else:
            self._vim.call(
                'denite#util#execute_path',
                f'silent keepalt {command}', bufname)

    def _split_floating(self, split: str) -> None:
        # Use floating window
        if split == 'floating':
            self._vim.call(
                'nvim_open_win', self._vim.call('bufnr', '%'), True, {
                    'relative': 'editor',
                    'row': self._context['winrow'],
                    'col': self._context['wincol'],
                    'width': self._context['winwidth'],
                    'height': self._context['winheight'],
                })
        elif split == 'floating_relative_window':
            self._vim.call(
                'nvim_open_win', self._vim.call('bufnr', '%'), True, {
                    'relative': 'win',
                    'row': self._context['winrow'],
                    'col': self._context['wincol'],
                    'width': self._context['winwidth'],
                    'height': self._context['winheight'],
                })
        elif split == 'floating_relative_cursor':
            # Same nvim_open_win call with 'relative': 'cursor'; the anchor
            # ('NW' or 'SW'), the row and context['filter_winrow'] are derived
            # from the cursor position and the space left below it.
            pass
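For reference, the nvim_open_win RPC used by _split_floating() can be exercised on its
own. A minimal sketch (Neovim only; the geometry values are arbitrary) that opens the
current buffer in a float and prints part of the resulting window config:

    import pynvim

    vim = pynvim.attach('child', argv=['nvim', '--embed', '--headless'])
    winid = vim.call('nvim_open_win', vim.call('bufnr', '%'), True, {
        'relative': 'editor',
        'row': 1,
        'col': 0,
        'width': 40,
        'height': 10,
    })
    print(vim.call('nvim_win_get_config', winid)['relative'])   # -> 'editor'
    vim.close()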
    def _update_displayed_texts(self) -> None:
        candidates_len = len(self._candidates)
        if not self._is_async and self._context['auto_resize']:
            winminheight = self._context['winminheight']
            max_height = min(self._context['winheight'],
                             self._get_max_height())
            if (winminheight != -1 and candidates_len < winminheight):
                self._winheight = winminheight
            elif candidates_len > max_height:
                self._winheight = max_height
            elif candidates_len != self._winheight:
                self._winheight = candidates_len

        max_source_name_len = 0
        if self._candidates:
            max_source_name_len = max([
                len(self._get_display_source_name(x['source_name']))
                for x in self._candidates])
        self._context['max_source_name_len'] = max_source_name_len
        self._context['max_source_name_format'] = (
            '{:<' + str(self._context['max_source_name_len']) + '}')
        self._displayed_texts = [
            self._get_candidate_display_text(i)
            for i in range(0, candidates_len)
        ]

    def _get_candidate_display_text(self, index: int) -> str:
        source_names = self._context['source_names']
        candidate = self._candidates[index]
        terms = []
        if self._is_multi and source_names != 'hide':
            terms.append(self._context['max_source_name_format'].format(
                self._get_display_source_name(candidate['source_name'])))
        encoding = self._context['encoding']
        abbr = candidate.get('abbr', candidate['word']).encode(
            encoding, errors='replace').decode(encoding, errors='replace')
        terms.append(abbr[:int(self._context['max_candidate_width'])])
        return (str(self._context['selected_icon'])
                if index in self._selected_candidates
                else ' ') + ' '.join(terms).replace('\n', '')

    def _update_status(self) -> None:
        inpt = ''
        if self._context['input']:
            inpt = self._context['input'] + ' '
        if self._context['error_messages']:
            inpt = '[ERROR] ' + inpt
        path = '[' + self._context['path'] + ']'

        status = {
            'input': inpt,
            'sources': self._statusline_sources,
            'path': path,
            # Extra
            'buffer_name': self._context['buffer_name'],
            'line_total': len(self._candidates),
        }
        if status == self._prev_status:
            return
        self._bufvars['denite_statusline'] = status
        self._prev_status = status

        linenr = "printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))"

        if self._context['statusline']:
            if self._floating or self._filter_floating:
                self._vim.options['titlestring'] = (
                    "%{denite#get_status('input')}%* " +
                    "%{denite#get_status('sources')} " +
                    " %{denite#get_status('path')}%*" +
                    "%{" + linenr + "}%*")
            else:
                winnr = self._vim.call('win_id2win', self._winid)
                self._vim.call('setwinvar', winnr, '&statusline', (
                    "%#deniteInput#%{denite#get_status('input')}%* " +
                    "%{denite#get_status('sources')} %=" +
                    "%#deniteStatusLinePath# %{denite#get_status('path')}%*" +
                    "%#deniteStatusLineNumber#%{" + linenr + "}%*"))
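A pure-Python check of the column layout built by _update_displayed_texts() and
_get_candidate_display_text(): the source name is left-padded to the longest name so
the candidate text starts in the same column. The candidate dicts here are invented.

    candidates = [
        {'source_name': 'file', 'word': 'setup.py'},
        {'source_name': 'buffer', 'word': '[denite]-default'},
    ]
    max_len = max(len(c['source_name']) for c in candidates)
    fmt = '{:<' + str(max_len) + '}'          # same format string as above
    for c in candidates:
        print(' ' + fmt.format(c['source_name']) + ' ' + c['word'])
    # ->  file   setup.py
    # ->  buffer [denite]-default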
    def _get_display_source_name(self, name: str) -> str:
        source_names = self._context['source_names']
        if not self._is_multi or source_names == 'hide':
            source_name = ''
        else:
            short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\1', name)
                          if re.search(r'[^a-zA-Z]', name) else name[:2])
            source_name = short_name if source_names == 'short' else name
        return source_name

    def _get_max_height(self) -> int:
        return int(self._vim.options['lines']) if not self._floating else (
            int(self._vim.options['lines']) -
            int(self._context['winrow']) -
            int(self._vim.options['cmdheight']))
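A quick pure-Python check of the abbreviation rule used by _get_display_source_name():
when the source name contains a non-letter, every alphabetic run collapses to its first
letter; otherwise only the first two characters are kept. The sample names are made up.

    import re

    def short_name(name: str) -> str:
        return (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\1', name)
                if re.search(r'[^a-zA-Z]', name) else name[:2])

    print(short_name('file_rec'))   # -> f_r
    print(short_name('buffer'))     # -> bu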
    def _check_move_option(self) -> None:
        if self._context['cursor_pos'].isnumeric():
            self._cursor = int(self._context['cursor_pos']) + 1
        elif re.match(r'\+\d+', self._context['cursor_pos']):
            for _ in range(int(self._context['cursor_pos'][1:])):
                self._move_to_next_line()
        elif re.match(r'-\d+', self._context['cursor_pos']):
            for _ in range(int(self._context['cursor_pos'][1:])):
                self._move_to_prev_line()
        elif self._context['cursor_pos'] == '$':
            self._move_to_last_line()

    def _get_cursor_candidate(self) -> Candidate:
        return self._get_candidate(self._cursor)

    def _get_candidate(self, pos: int) -> Candidate:
        if not self._candidates or pos > len(self._candidates):
            return {}
        return self._candidates[pos - 1]

    def _get_selected_candidates(self) -> Candidates:
        if not self._selected_candidates:
            return ([self._get_cursor_candidate()]
                    if self._get_cursor_candidate() else [])
        return [self._candidates[x] for x in self._selected_candidates]

    def _init_cursor(self) -> None:
        if self._context['reversed']:
            self._move_to_last_line()
        else:
            self._move_to_first_line()

    def _move_to_pos(self, pos: int) -> None:
        self._vim.call('cursor', pos, 0)
        self._cursor = pos

    def _move_to_next_line(self) -> None:
        if self._cursor < len(self._candidates):
            self._cursor += 1

    def _move_to_prev_line(self) -> None:
        if self._cursor >= 1:
            self._cursor -= 1

    def _move_to_first_line(self) -> None:
        self._cursor = 1

    def _move_to_last_line(self) -> None:
        self._cursor = len(self._candidates)
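A small pure-Python check of the 1-based cursor handling above: _get_candidate()
translates the cursor into a 0-based list index, and the movement helpers keep the
cursor inside [1, len(candidates)]. The candidate values are placeholders.

    candidates = ['alpha', 'beta', 'gamma']
    cursor = 1                                   # _move_to_first_line()

    def get_candidate(pos):
        if not candidates or pos > len(candidates):
            return None
        return candidates[pos - 1]

    if cursor < len(candidates):                 # _move_to_next_line()
        cursor += 1
    print(cursor, get_candidate(cursor))         # -> 2 beta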
    def do_action(self, action_name: str, command: str = '',
                  is_manual: bool = False) -> None:
        if is_manual:
            candidates = self._get_selected_candidates()
        elif self._get_cursor_candidate():
            candidates = [self._get_cursor_candidate()]
        else:
            candidates = []

        if not self._denite or not candidates or not action_name:
            return

        self._prev_action = action_name
        action = self._denite.get_action(
            self._context, action_name, candidates)
        if not action:
            return

        post_action = self._context['post_action']

        is_quit = action['is_quit'] or post_action == 'quit'
        if is_quit:
            self.quit()

        self._denite.do_action(self._context, action_name, candidates)
        self._result = candidates
        if command != '':
            self._vim.command(command)

        if is_quit and post_action == 'open':
            # Re-open denite buffer
            prev_cursor = self._cursor
            cursor_candidate = self._get_cursor_candidate()

            self._init_buffer()
            self.redraw(False)
            if cursor_candidate == self._get_candidate(prev_cursor):
                # Restore the cursor
                self._move_to_pos(prev_cursor)

            # Disable quit flag
            is_quit = False

        if not is_quit and is_manual:
            self._selected_candidates = []
            self.redraw(action['is_redraw'])

        if is_manual and self._context['sources_queue']:
            self._context['input'] = ''
            self._context['quick_move'] = ''
            self._start_sources_queue(self._context)

        return

    def quit(self) -> None:
        if self._denite:
            self._denite.on_close(self._context)
        self._quit_buffer()
        self._result = []
        return

    def _init_denite(self) -> None:
        if self._denite:
            self._denite.start(self._context)
            self._denite.on_init(self._context)
        self._initialized = True
        self._winheight = self._context['winheight']
        self._winwidth = self._context['winwidth']

    def _gather_candidates(self) -> None:
        self._selected_candidates = []
        if self._denite:
            self._denite.gather_candidates(self._context)

    def _start_timer(self, key: str) -> None:
        if key in self._timers:
            return

        if key == 'update_candidates':
            self._timers[key] = self._vim.call(
                'denite#helper#_start_update_candidates_timer', self._bufnr)
        elif key == 'update_buffer':
            self._timers[key] = self._vim.call(
                'denite#helper#_start_update_buffer_timer', self._bufnr)

    def _stop_timer(self, key: str) -> None:
        if key not in self._timers:
            return

        self._vim.call('timer_stop', self._timers[key])

        # Note: After timer_stop is called, self._timers may be removed
        if key in self._timers:
            self._timers.pop(key)

    # The same fragments also contain the remaining helpers of the class
    # (_init_buffer, _init_syntax, _resize_buffer, _get_direction, _get_wininfo,
    # _switch_prev_buffer, _cleanup, _quit_buffer, _close_current_window,
    # _do_command, _do_immediately, _check_do_option, _restart), which follow
    # the same structure.
return resume = self._initialized and context['resume'] if", "not self._denite: # if hasattr(self._vim, 'run_coroutine'): # self._denite = ASyncParent(self._vim) # else: self._denite", "self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move']", "'col': init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col = init_pos['col'] if init_pos['anchor'] ==", "> len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if", "is_quit = action['is_quit'] or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates)", "winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config',", "if direction == 'dynamictop': direction = 'aboveleft' if is_fit else 'topleft' else: direction", "max_source_name_len = 0 if self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in self._candidates])", "%=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr + \"}%*\")) def _get_display_source_name(self, name:", "self._result = [] return def _restart(self) -> None: self._context['input'] = '' self._quit_buffer() self._init_denite()", "Note: After timer_stop is called, self._timers may be removed if key in self._timers:", "denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal',", "Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine", "+ \"%#deniteStatusLineNumber#%{\" + linenr + \"}%*\")) def _get_display_source_name(self, name: str) -> str: source_names", "True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], })", "False self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete", "'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) -> str: direction = str(self._context['direction']) if", "'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'],", "self._vim.call('setpos', '.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd", "source_names = self._context['source_names'] candidate = self._candidates[index] terms = [] if self._is_multi and source_names", "self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid = self._context['prev_winid'] self._winrestcmd = self._vim.call('winrestcmd')", "self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring = '' self._ruler = False", "self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], 
self._get_max_height()) if (winminheight !=", "options = self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] = False options['buftype'] =", "path = '[' + self._context['path'] + ']' status = { 'input': inpt, 'sources':", "def _update_buffer(self) -> None: is_current_buffer = self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete and", "[] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context)", "+ linenr + \"}%*\")) def _get_display_source_name(self, name: str) -> str: source_names = self._context['source_names']", "int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None: split =", "row if self._vim.call('win_id2win', filter_winid) > 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row,", "self._context['input']) return self._updated def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not self._is_async", "= ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts != prev_displayed_texts", "self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options = { 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn',", "is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self,", "pos > len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates:", "!= winheight: if self._floating: wincol = self._context['winrow'] row = wincol if split ==", "restore) def _check_do_option(self) -> bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif", "+ 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link", "Move to denite window self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split'] ==", "self._vim.command('doautocmd BufWinEnter') if not self._vim.call('has', 'nvim'): # In Vim8, FileType autocmd is not", "split == 'vertical' if not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not", "= '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) ->", "def _get_candidate(self, pos: int) -> Candidate: if not self._candidates or pos > len(self._candidates):", "'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth, 'height': winheight, }) filter_col", "self._vim.call('line', '.') # Note: Close filter window before preview window self._vim.call('denite#filter#_close_filter_window') if not", "window before preview window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers", "self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match 
deniteConcealedMark", "int: return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight']))", "not action_name: return self._prev_action = action_name action = self._denite.get_action( self._context, action_name, candidates) if", "\"%#deniteStatusLineNumber#%{\" + linenr + \"}%*\")) def _get_display_source_name(self, name: str) -> str: source_names =", "None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor = len(self._candidates) def _start_timer(self,", "for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr))", "= 'edit' if split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction'] ==", "_init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos", "= self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and candidates_len <", "BufWinEnter') if not self._vim.call('has', 'nvim'): # In Vim8, FileType autocmd is not fired", "if (winminheight != -1 and candidates_len < winminheight): self._winheight = winminheight elif candidates_len", "self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height':", "= self._vim.call( 'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern =", "max(self._winwidth, 1) else: width = self._context['winwidth'] height = self._context['winheight'] if opened_pos + height", "neovim. 
self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal concealcursor=inv') self._vim.command('setlocal nocursorcolumn') self._vim.command('setlocal nofoldenable') self._vim.command('setlocal foldcolumn=0')", "split != 'vertical' and not self._floating: # Move the window to bottom self._vim.command('wincmd", "_update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight =", "None: if self._cursor >= 1: self._cursor -= 1 def _move_to_first_line(self) -> None: self._cursor", "if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if", "inpt = '[ERROR] ' + inpt path = '[' + self._context['path'] + ']'", "# Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status: return self._bufvars['denite_statusline']", "'$') == 1): return winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical", "'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt %s %s %s %s'", "self._winid) if not is_vertical and self._vim.current.window.height != winheight: if self._floating: wincol = self._context['winrow']", "== 'quit' if is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates if command", "self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0)", "nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']:", "_check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']):", "self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] =", "in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed", "elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not", "deniteConcealedMark /^[ %s]/' + ' conceal contained') % ( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context,", "if not sources: # Ignore empty sources. 
error(self._vim, 'Empty sources') return self._init_denite() self._gather_candidates()", "self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if", "if self._context['do'] != '': self._do_command(self._context['do']) return True elif (self._candidates and self._context['immediately'] or len(self._candidates)", "= False options['modifiable'] = False options['filetype'] = 'denite' if self._vim.call('exists', '#WinEnter'): self._vim.command('doautocmd WinEnter')", "self._context['winwidth'] height = self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']: anchor", "'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link '", "(self._candidates and self._context['immediately'] or len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately() return True return", "= self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources", "1) width = max(self._winwidth, 1) else: width = self._context['winwidth'] height = self._context['winheight'] if", "Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items():", "= self._vim.call('winwidth', 0) is_fit = not [x for x in self._displayed_texts if self._vim.call('strwidth',", "self._bufnr = -1 self._winid = -1 self._winrestcmd = '' self._initialized = False self._winheight", "opened_pos else: anchor = 'NW' row = 1 self._context['filter_winrow'] = row + height", "def start(self, sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if", "is_force if is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def", "'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace')", "{} self._vim.command('highlight! link CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring'] = self._titlestring self._vim.options['ruler']", "self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained') % ( self._context['selected_icon'])) if", "import re import typing from denite.util import echo, error, clearmatch, regex_convert_py_vim from denite.util", "_get_display_source_name(self, name: str) -> str: source_names = self._context['source_names'] if not self._is_multi or source_names", "# \"current.window.options\" changes global value instead of local in # neovim. 
self._vim.command('setlocal colorcolumn=')", "= 1 self._context['filter_winrow'] = row + height + opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'),", "self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index", "Candidates, Candidate from denite.parent import SyncParent class Default(object): @property def is_async(self) -> bool:", "self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable quit", "in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id in matches: self._vim.call('matchdelete',", "def _move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor = pos if", "do_map self._vim.command('silent! autocmd! denite') if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): # Ignore command line window.", "command line window. return resume = self._initialized and context['resume'] if resume: # Skip", "self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self,", "self._context['source_names'] candidate = self._candidates[index] terms = [] if self._is_multi and source_names != 'hide':", "= ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark')", "len(self._candidates) == 1 and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async", "= False options['buflisted'] = False options['modeline'] = False options['modifiable'] = False options['filetype'] =", "self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options:", "= pos if self._context['reversed']: self._vim.command('normal! 
zb') def _move_to_next_line(self) -> None: if self._cursor <", "direction def _get_wininfo(self) -> typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def", "( \"%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" + \"%{\" + linenr", "row += self._context['winheight'] row -= self._winheight self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row,", "+ self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' +", "typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any] = [] self._previous_text", "return self._floating = split in [ 'floating', 'floating_relative_cursor', 'floating_relative_window', ] self._filter_floating = False", "linenr + \"}%*\")) def _get_display_source_name(self, name: str) -> str: source_names = self._context['source_names'] if", "-> None: if self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self)", "= row + height + opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative':", "= False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos: typing.List[typing.Any] =", "if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', self._context['input']) return self._updated def _update_displayed_texts(self) -> None:", "> max_height: self._winheight = max_height elif candidates_len != self._winheight: self._winheight = candidates_len max_source_name_len", "( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command(", "def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer", "'tab' or self._vim.call('winnr', '$') == 1): return winheight = max(self._winheight, 1) winwidth =", "def _init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight", "self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None:", "''): self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case", "'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'],", "{} self._vim.vars['denite#_previewed_buffers'] = {} self._save_window_options = {} window_options = { 'colorcolumn', 'concealcursor', 'conceallevel',", "elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = [] if not self._denite or", "self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'editor', 'row': row, 'col': self._context['wincol'], 'width': winwidth, 'height': winheight,", "self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates if command != '': self._vim.command(command) if", "( int(self._vim.options['lines']) - int(self._context['winrow']) - 
int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None: split", "if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for x in self._vim.call('getmatches', self._winid)] if", "= False self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring", "= self._context['input'] + ' ' if self._context['error_messages']: inpt = '[ERROR] ' + inpt", "== 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]',", "is_current_buffer: bool) -> None: split = self._context['split'] if (split == 'no' or split", "'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'],", "'' self._ruler = False self._prev_action = '' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos:", "for x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop':", "not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical and self._vim.current.window.height !=", "def _switch_buffer(self) -> None: split = self._context['split'] if (split != 'no' and self._winid", "= False self._updated = False self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1", "str = '', is_manual: bool = False) -> None: if is_manual: candidates =", "return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path']", "and self._context['immediately_1']): self._do_immediately() return True return not (self._context['empty'] or self._is_async or self._candidates) def", "' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if", "self._context['winheight'] if opened_pos + height + 3 > self._vim.options['lines']: anchor = 'SW' row", "!= -1 and candidates_len < winminheight): self._winheight = winminheight elif candidates_len > max_height:", "{'window': self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable']", "def _move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor =", "None: candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height", "-> None: if not context['sources_queue']: return self._sources_history.append({ 'sources': context['sources_queue'][0], 'path': context['path'], }) self._start(context['sources_queue'][0],", "= [] if self._is_multi and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding']", "the window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k]", "if not is_vertical and self._vim.current.window.height != winheight: if self._floating: wincol = self._context['winrow'] row", "= [] self._candidates: Candidates = [] self._cursor = 0 self._entire_len = 0 self._result:", "= self._get_selected_candidates() elif 
self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = [] if not", "self._timers may be removed if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str)", "%s' % ( self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent", "self._sources_history: typing.List[typing.Any] = [] self._previous_text = '' self._floating = False self._filter_floating = False", "'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name)", "self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status ==", "row = wincol if split == 'floating': if self._context['auto_resize'] and row > 1:", "}) self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal! zb') elif is_vertical and self._vim.current.window.width", "self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext)", "else ( int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self, is_current_buffer: bool) -> None:", "candidate.get('abbr', candidate['word']))) if goto: # Move to the previous window self._vim.command('wincmd p') def", "self._context['source_names'] if not self._is_multi or source_names == 'hide': source_name = '' else: short_name", "self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to", "if not self._denite: return False [self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts", "context: UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent! autocmd! denite') if re.search(r'\\[Command", "!= 'no' and self._winid > 0 and self._vim.call('win_gotoid', self._winid)): if split != 'vertical'", "= self._statusline_sources self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated =", "self._vim.call('winnr', '$') == 1: self._vim.command('buffer #') else: self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup()", "= len(self._candidates) def _start_timer(self, key: str) -> None: if key in self._timers: return", "denite window self._vim.call('win_gotoid', self._winid) # Restore the window if self._context['split'] == 'no': self._switch_prev_buffer()", "context['path'], }) self._start(context['sources_queue'][0], context) if context['sources_queue']: context['sources_queue'].pop(0) context['path'] = self._context['path'] def _start(self, sources:", "\"current.window.options\" changes global value instead of local in # neovim. 
self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal", "and source_names != 'hide': terms.append(self._context['max_source_name_format'].format( self._get_display_source_name(candidate['source_name']))) encoding = self._context['encoding'] abbr = candidate.get('abbr', candidate['word']).encode(", "Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag is_quit = False if not", "self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] = {}", "winwidth = max(self._winwidth, 1) is_vertical = split == 'vertical' if not is_current_buffer: restore", "if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if source_names == 'short' else", "x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop': direction", "winheight: if self._floating: wincol = self._context['winrow'] row = wincol if split == 'floating':", "= self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or", "re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']): for _ in", "self._context['path'] def _start(self, sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import do_map", "= sources self._context['is_redraw'] = False self._is_multi = len(sources) > 1 if not sources:", "window self._vim.call('denite#filter#_close_filter_window') if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for bufnr in", "filter_row = 0 if wincol == 1 else row + winheight filter_col =", "'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split'] if (split !=", "candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates),", "(self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if self._updated and is_changed: if not", "self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width':", "'{:<' + str(self._context['max_source_name_len']) + '}') self._displayed_texts = [ self._get_candidate_display_text(i) for i in range(0,", "if index in self._selected_candidates else ' ') + ' '.join(terms).replace('\\n', '') def _get_max_height(self)", "link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight", "' vsplit' if split == 'vertical' else ' split' bufname = '[denite]-' +", "re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if source_names == 'short' else name", "[] self._previous_text = '' self._floating = False self._filter_floating = False self._updated = False", "> winwidth] if direction == 'dynamictop': direction = 'aboveleft' if is_fit else 'topleft'", "'' self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring = '' self._ruler =", "self._context['immediately'] or len(self._candidates) == 1 and self._context['immediately_1']): 
self._do_immediately() return True return not (self._context['empty']", "return self._prev_action = action_name action = self._denite.get_action( self._context, action_name, candidates) if not action:", "0]) if is_current_buffer: if (self._context['auto_action'] and prev_candidate != self._get_cursor_candidate()): self.do_action(self._context['auto_action']) self._updated = False", "winheight, }) filter_row = 0 if wincol == 1 else row + winheight", "winheight filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative':", "self._context['input'] = '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool =", "UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent! autocmd! denite') if re.search(r'\\[Command Line\\]$',", "wincol = self._context['winrow'] row = wincol if split == 'floating': if self._context['auto_resize'] and", "self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr or", "+ height + 3 > self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow']", "self._candidates: max_source_name_len = max([ len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format']", "self._vim.command('wincmd p') def _do_command(self, command: str) -> None: self._init_cursor() cursor = 1 while", "if self._context['start_filter']: do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts", "denite buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate ==", "for i in range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer = self._bufnr", "source_names == 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if", "self._result = candidates if command != '': self._vim.command(command) if is_quit and post_action ==", "command != '': self._vim.command(command) if is_quit and post_action == 'open': # Re-open denite", "'matchadd', 'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])',", "%s %s' % ( self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path',", "1 else row + winheight filter_col = self._context['wincol'] else: init_pos = self._vim.call('nvim_win_get_config', self._winid)", "def _switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew')", "' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight! 
link CursorLine CursorLine') if self._floating or", "self._vim.current.window.width != winwidth: self._vim.command('vertical resize ' + str(winwidth)) if not is_current_buffer: self._vim.call('win_gotoid', restore)", "typing from denite.util import echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim, UserContext,", "= [] self._cursor = 0 self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context:", "elif self._context['filter_split_direction'] == 'floating': self._filter_floating = True elif split != 'no': command =", "self._context: UserContext = {} self._bufnr = -1 self._winid = -1 self._winrestcmd = ''", "'no': command = self._get_direction() command += ' vsplit' if split == 'vertical' else", "properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos:", "= [] self._save_window_options: typing.Dict[str, typing.Any] = {} self._sources_history: typing.List[typing.Any] = [] self._previous_text =", "candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates = [] if", "initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key in update:", "== 'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0) is_fit =", "errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else ' ') + '", "= self._get_direction() command += ' vsplit' if split == 'vertical' else ' split'", "# Use floating window if split == 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True,", "+ winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win', filter_winid) > 0:", "bufname = '[denite]-' + self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical", "return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate: if not self._candidates or pos", "path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status: return", "save_winid) elif is_current_buffer: self._vim.call('cursor', [prev_linenr, 0]) if is_current_buffer: if (self._context['auto_action'] and prev_candidate !=", "'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer': self._timers[key] = self._vim.call(", "' + self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link '", "if opened_pos + height + 3 > self._vim.options['lines']: anchor = 'SW' row =", "+ \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr + \"}%*\")) def", "self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer =", "self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 
'quick_move', []): return if self._context['start_filter']:", "height + 3 > self._vim.options['lines']: anchor = 'SW' row = 0 self._context['filter_winrow'] =", "x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<' + str(self._context['max_source_name_len']) +", "[] self._context: UserContext = {} self._bufnr = -1 self._winid = -1 self._winrestcmd =", "is_current_buffer: self._vim.call('win_gotoid', restore) def _check_do_option(self) -> bool: if self._context['do'] != '': self._do_command(self._context['do']) return", "== 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline') - 1) if self._context['auto_resize']: height", "(str(self._context['selected_icon']) if index in self._selected_candidates else ' ') + ' '.join(terms).replace('\\n', '') def", "if split == 'floating': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row':", "is_async(self) -> bool: return self._is_async def __init__(self, vim: Nvim) -> None: self._vim =", "[ self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def _update_buffer(self) -> None: is_current_buffer", "filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row =", "+ \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\" + \"%#deniteStatusLineNumber#%{\" + linenr + \"}%*\")) def _get_display_source_name(self, name: str)", "in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid) self._matched_range_id = -1", "None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note:", "self._context['winrow'] row = wincol if split == 'floating': if self._context['auto_resize'] and row >", "self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'], 'width': winwidth,", "return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0])", "= [] self._previous_text = '' self._floating = False self._filter_floating = False self._updated =", "return post_action = self._context['post_action'] is_quit = action['is_quit'] or post_action == 'quit' if is_quit:", "vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def", "self._matched_range_id, self._winid) self._matched_range_id = -1 if self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id", "{} self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self,", "post_action == 'open': # Re-open denite buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate()", "if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None: # Use", "self._vim.call('strwidth', x) > winwidth] if direction == 'dynamictop': direction = 'aboveleft' if is_fit", "regex_convert_py_vim from denite.util import Nvim, UserContext, Candidates, Candidate from denite.parent import SyncParent class", "') + ' 
'.join(terms).replace('\\n', '') def _get_max_height(self) -> int: return int(self._vim.options['lines']) if not", "buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid) # Restore", "echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: # Move to", "self._context['highlight_window_background'] ) self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax", "self._winrestcmd = '' self._initialized = False self._winheight = 0 self._winwidth = 0 self._winminheight", "window. return resume = self._initialized and context['resume'] if resume: # Skip the initialization", "match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[", "-> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite buffer is already", "self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer')", "winheight, }) filter_col = init_pos['col'] if init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid)", "False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit' if split", "Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight default link ' +", "= short_name if source_names == 'short' else name return source_name def _get_candidate_display_text(self, index:", "self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'editor', 'row': self._context['winrow'], 'col': self._context['wincol'], 'width':", "self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context)", "'dynamicbottom': self._update_displayed_texts() winwidth = self._vim.call('winwidth', 0) is_fit = not [x for x in", "self._context['auto_action']: self._vim.command('autocmd denite ' 'CursorMoved <buffer> ' 'call denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) ->", "not (self._context['empty'] or self._is_async or self._candidates) def _check_move_option(self) -> None: if self._context['cursor_pos'].isnumeric(): self._cursor", "previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if not self._vim.call('win_findbuf', bufnr): self._vim.command('silent bdelete '", "already closed return winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer", "None: if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self) -> None:", "max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and candidates_len < winminheight): self._winheight", "if self._denite: self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self) -> bool: if not self._denite: return False", "None: if key not in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop", "'%'), True, { 'relative': 'cursor', 'row': row, 'col': 0, 
'width': width, 'height': height,", "'.', self._prev_curpos) if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice", "self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], }", "self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim, 'Normal', '[{}/{}] {}'.format( self._cursor,", "self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer() self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 'quick_move', []): return", "if not self._context['has_preview_window']: self._vim.command('pclose!') # Clear previewed buffers for bufnr in self._vim.vars['denite#_previewed_buffers'].keys(): if", "'editor', 'row': row, 'col': self._context['wincol'], 'width': winwidth, 'height': winheight, }) filter_row = 0", "else: direction = 'belowright' if is_fit else 'botright' return direction def _get_wininfo(self) ->", "len(self._get_display_source_name(x['source_name'])) for x in self._candidates]) self._context['max_source_name_len'] = max_source_name_len self._context['max_source_name_format'] = ( '{:<' +", "'spell', 'winfixheight', 'wrap', } for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note:", "else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name =", "_move_to_last_line(self) -> None: self._cursor = len(self._candidates) def _start_timer(self, key: str) -> None: if", "return def _restart(self) -> None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates()", "'winfixheight', 'wrap', } for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have", "self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and", "= [] self.redraw(action['is_redraw']) if is_manual and self._context['sources_queue']: self._context['input'] = '' self._context['quick_move'] = ''", "self._context['buffer_name'] if self._vim.call('exists', '*bufadd'): bufnr = self._vim.call('bufadd', bufname) vertical = 'vertical' if split", "return self._updated def _update_displayed_texts(self) -> None: candidates_len = len(self._candidates) if not self._is_async and", "width = self._context['winwidth'] height = self._context['winheight'] if opened_pos + height + 3 >", "bool = True) -> None: self._context['is_redraw'] = is_force if is_force: self._gather_candidates() if self._update_candidates():", "is not fired after set filetype option. self._vim.command('silent doautocmd FileType denite') if self._context['auto_action']:", "len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates') self._stop_timer('update_buffer') if self._vim.current.buffer.number", "sources. 
# ============================================================================
# FILE: default.py
# AUTHOR: <NAME> <<EMAIL> at g<EMAIL>>
# License: MIT license
# ============================================================================

import re
import typing

from denite.util import echo, error, clearmatch, regex_convert_py_vim
from denite.util import Nvim, UserContext, Candidates, Candidate
from denite.parent import SyncParent


class Default(object):
    @property
    def is_async(self) -> bool:
        return self._is_async

    def __init__(self, vim: Nvim) -> None:
        self._vim = vim
        self._denite: typing.Optional[SyncParent] = None
        self._selected_candidates: typing.List[int] = []
        self._candidates: Candidates = []
        self._cursor = 0
        self._entire_len = 0
        self._result: typing.List[typing.Any] = []
        self._context: UserContext = {}
        self._bufnr = -1
        self._winid = -1
        self._winrestcmd = ''
        self._initialized = False
        self._winheight = 0
        self._winwidth = 0
        self._is_multi = False
        self._is_async = False
        self._matched_pattern = ''
        self._displayed_texts: typing.List[str] = []
        self._statusline_sources = ''
        self._titlestring = ''
        self._ruler = False
        self._prev_action = ''
        self._prev_status: typing.Dict[str, typing.Any] = {}
        self._prev_curpos: typing.List[typing.Any] = []
        self._save_window_options: typing.Dict[str, typing.Any] = {}
        self._sources_history: typing.List[typing.Any] = []
        self._previous_text = ''
        self._floating = False
        self._filter_floating = False
        self._updated = False
        self._timers: typing.Dict[str, int] = {}
        self._matched_range_id = -1
        self._matched_char_id = -1
        self._check_matchdelete = bool(self._vim.call(
            'denite#util#check_matchdelete'))

    def start(self, sources: typing.List[typing.Any],
              context: UserContext) -> typing.List[typing.Any]:
        if not self._denite:
            # if hasattr(self._vim, 'run_coroutine'):
            #     self._denite = ASyncParent(self._vim)
            # else:
            self._denite = SyncParent(self._vim)

        self._result = []
        context['sources_queue'] = [sources]
        self._start_sources_queue(context)

        return self._result

    def do_action(self, action_name: str,
                  command: str = '', is_manual: bool = False) -> None:
        if is_manual:
            candidates = self._get_selected_candidates()
        elif self._get_cursor_candidate():
            candidates = [self._get_cursor_candidate()]
        else:
            candidates = []

        if not self._denite or not candidates or not action_name:
            return

        self._prev_action = action_name
        action = self._denite.get_action(
            self._context, action_name, candidates)
        if not action:
            return

        post_action = self._context['post_action']

        is_quit = action['is_quit'] or post_action == 'quit'
        if is_quit:
            self.quit()

        self._denite.do_action(self._context, action_name, candidates)
        self._result = candidates
        if command != '':
            self._vim.command(command)

        if is_quit and post_action == 'open':
            # Re-open denite buffer
            prev_cursor = self._cursor
            cursor_candidate = self._get_cursor_candidate()

            self._init_buffer()
            self.redraw(False)
            if cursor_candidate == self._get_candidate(prev_cursor):
                # Restore the cursor
                self._move_to_pos(prev_cursor)

            # Disable quit flag
            is_quit = False

        if not is_quit and is_manual:
            self._selected_candidates = []
            self.redraw(action['is_redraw'])

        if is_manual and self._context['sources_queue']:
            self._context['input'] = ''
            self._context['quick_move'] = ''
            self._start_sources_queue(self._context)

        return

    def redraw(self, is_force: bool = True) -> None:
        self._context['is_redraw'] = is_force
        if is_force:
            self._gather_candidates()
        if self._update_candidates():
            self._update_buffer()
        else:
            self._update_status()
        self._context['is_redraw'] = False

    def quit(self) -> None:
        if self._denite:
            self._denite.on_close(self._context)
        self._quit_buffer()
        self._result = []
        return

    def _start_sources_queue(self, context: UserContext) -> None:
        if not context['sources_queue']:
            return

        self._sources_history.append({
            'sources': context['sources_queue'][0],
            'path': context['path'],
        })

        self._start(context['sources_queue'][0], context)

        if context['sources_queue']:
            context['sources_queue'].pop(0)
            context['path'] = self._context['path']

    def _start(self, sources: typing.List[typing.Any],
               context: UserContext) -> None:
        from denite.ui.map import do_map

        self._vim.command('silent! autocmd! denite')

        if re.search(r'\[Command Line\]$', self._vim.current.buffer.name):
            # Ignore command line window.
            return

        resume = self._initialized and context['resume']
        if resume:
            # Skip the initialization
            update = ('immediately', 'immediately_1', 'cursor_pos',
                      'prev_winid', 'start_filter', 'quick_move')
            for key in update:
                self._context[key] = context[key]

            self._check_move_option()
            if self._check_do_option():
                return

            self._init_buffer()
            if context['refresh']:
                self.redraw()
            self._move_to_pos(self._cursor)
        else:
            if self._context != context:
                self._context.clear()
                self._context.update(context)
            self._context['sources'] = sources
            self._context['is_redraw'] = False
            self._is_multi = len(sources) > 1

            if not sources:
                # Ignore empty sources.
                error(self._vim, 'Empty sources')
                return

            self._init_denite()
            self._gather_candidates()
            self._update_candidates()

            self._init_cursor()
            self._check_move_option()
            if self._check_do_option():
                return

            self._init_buffer()

        self._update_displayed_texts()
        self._update_buffer()
        self._move_to_pos(self._cursor)

        if self._context['quick_move'] and do_map(self, 'quick_move', []):
            return

        if self._context['start_filter']:
            do_map(self, 'open_filter_buffer', [])

    def _init_buffer(self) -> None:
        self._prev_status = dict()
        self._displayed_texts = []

        self._prev_bufnr = self._vim.current.buffer.number
        self._prev_curpos = self._vim.call('getcurpos')
        self._prev_wininfo = self._get_wininfo()
        self._prev_winid = self._context['prev_winid']
        self._winrestcmd = self._vim.call('winrestcmd')

        self._ruler = self._vim.options['ruler']

        self._switch_buffer()
        self._bufnr = self._vim.current.buffer.number
        self._winid = self._vim.call('win_getid')

        self._resize_buffer(True)

        self._winheight = self._vim.current.window.height
        self._winwidth = self._vim.current.window.width

        self._bufvars = self._vim.current.buffer.vars
        self._bufvars['denite'] = {
            'buffer_name': self._context['buffer_name'],
        }
        self._bufvars['denite_statusline'] = {}

        self._vim.vars['denite#_previewed_buffers'] = {}

        self._save_window_options = {}
        window_options = {
            'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn',
            'cursorline', 'foldcolumn', 'foldenable', 'list', 'number',
            'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap',
        }
        for k in window_options:
            self._save_window_options[k] = self._vim.current.window.options[k]

        # Note: Have to use setlocal instead of "current.window.options"
        # "current.window.options" changes global value instead of local in
        # neovim.
        self._vim.command('setlocal colorcolumn=')
        self._vim.command('setlocal conceallevel=3')
        self._vim.command('setlocal concealcursor=inv')
        self._vim.command('setlocal nocursorcolumn')
        self._vim.command('setlocal nofoldenable')
        self._vim.command('setlocal foldcolumn=0')
        self._vim.command('setlocal nolist')
        self._vim.command('setlocal nonumber')
        self._vim.command('setlocal norelativenumber')
        self._vim.command('setlocal nospell')
        self._vim.command('setlocal winfixheight')
        self._vim.command('setlocal nowrap')
        if self._context['prompt']:
            self._vim.command('setlocal signcolumn=yes')
        else:
            self._vim.command('setlocal signcolumn=auto')
        if self._context['cursorline']:
            self._vim.command('setlocal cursorline')

        options = self._vim.current.buffer.options
        if self._floating:
            # Disable ruler
            self._vim.options['ruler'] = False
        options['buftype'] = 'nofile'
        options['bufhidden'] = 'delete'
        options['swapfile'] = False
        options['buflisted'] = False
        options['modeline'] = False
        options['modifiable'] = False
        options['filetype'] = 'denite'

        if self._vim.call('exists', '#WinEnter'):
            self._vim.command('doautocmd WinEnter')
        if self._vim.call('exists', '#BufWinEnter'):
            self._vim.command('doautocmd BufWinEnter')
        if not self._vim.call('has', 'nvim'):
            # In Vim8, FileType autocmd is not fired after set filetype
            # option.
            self._vim.command('silent doautocmd FileType denite')

        if self._context['auto_action']:
            self._vim.command('autocmd denite '
                              'CursorMoved <buffer> '
                              'call denite#call_map("auto_action")')

        self._init_syntax()

    def _switch_buffer(self) -> None:
        split = self._context['split']
        if (split != 'no' and self._winid > 0 and
                self._vim.call('win_gotoid', self._winid)):
            if split != 'vertical' and not self._floating:
                # Move the window to bottom
                self._vim.command('wincmd J')
            self._winrestcmd = ''
            return

        self._floating = split in [
            'floating', 'floating_relative_cursor',
            'floating_relative_window',
        ]
        self._filter_floating = False

        if self._vim.current.buffer.options['filetype'] != 'denite':
            self._titlestring = self._vim.options['titlestring']

        command = 'edit'
        if split == 'tab':
            self._vim.command('tabnew')
        elif self._floating:
            self._split_floating(split)
        elif self._context['filter_split_direction'] == 'floating':
            self._filter_floating = True
        elif split != 'no':
            command = self._get_direction()
            command += ' vsplit' if split == 'vertical' else ' split'

        bufname = '[denite]-' + self._context['buffer_name']
        if self._vim.call('exists', '*bufadd'):
            bufnr = self._vim.call('bufadd', bufname)
            vertical = 'vertical' if split == 'vertical' else ''
            command = ('buffer' if split in [
                'no', 'tab', 'floating',
                'floating_relative_window',
                'floating_relative_cursor'] else 'sbuffer')
            self._vim.command(
                'silent keepalt %s %s %s %s' % (
                    self._get_direction(), vertical, command, bufnr,
                )
            )
        else:
            self._vim.call(
                'denite#util#execute_path',
                f'silent keepalt {command}', bufname)

    def _init_syntax(self) -> None:
        self._vim.command('syntax case ignore')
        self._vim.command('highlight default link deniteInput ModeMsg')
        self._vim.command('highlight link deniteMatchedRange ' +
                          self._context['highlight_matched_range'])
        self._vim.command('highlight link deniteMatchedChar ' +
                          self._context['highlight_matched_char'])
        self._vim.command('highlight default link ' +
                          'deniteStatusLinePath Comment')
        self._vim.command('highlight default link ' +
                          'deniteStatusLineNumber LineNR')
        self._vim.command('highlight default link ' +
                          'deniteSelectedLine Statement')
        if self._floating:
            self._vim.current.window.options['winhighlight'] = (
                'Normal:' + self._context['highlight_window_background']
            )
        self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' +
                           ' contains=deniteConcealedMark') % (
                               self._context['selected_icon']))
        self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' +
                           ' conceal contained') % (
                               self._context['selected_icon']))

        if self._denite:
            self._denite.init_syntax(self._context, self._is_multi)

    def _get_direction(self) -> str:
        direction = str(self._context['direction'])
        if direction == 'dynamictop' or direction == 'dynamicbottom':
            self._update_displayed_texts()
            winwidth = self._vim.call('winwidth', 0)
            is_fit = not [x for x in self._displayed_texts
                          if self._vim.call('strwidth', x) > winwidth]
            if direction == 'dynamictop':
                direction = 'aboveleft' if is_fit else 'topleft'
            else:
                direction = 'belowright' if is_fit else 'botright'
        return direction

    def _get_wininfo(self) -> typing.List[typing.Any]:
        return [
            self._vim.options['columns'], self._vim.options['lines'],
            self._vim.call('win_getid'), self._vim.call('tabpagebuflist')
        ]

    def _switch_prev_buffer(self) -> None:
        if (self._prev_bufnr == self._bufnr or
                self._vim.buffers[self._prev_bufnr].name == ''):
            self._vim.command('enew')
        else:
            self._vim.command('buffer ' + str(self._prev_bufnr))

    def _init_denite(self) -> None:
        if self._denite:
            self._denite.start(self._context)
            self._denite.on_init(self._context)
        self._initialized = True
        self._winheight = self._context['winheight']
        self._winwidth = self._context['winwidth']

    def _gather_candidates(self) -> None:
        self._selected_candidates = []
        if self._denite:
            self._denite.gather_candidates(self._context)

    def _init_cursor(self) -> None:
        if self._context['reversed']:
            self._move_to_last_line()
        else:
            self._move_to_first_line()

    def _update_candidates(self) -> bool:
        if not self._denite:
            return False

        [self._is_async, pattern, statuses, self._entire_len,
         self._candidates] = self._denite.filter_candidates(self._context)

        prev_displayed_texts = self._displayed_texts
        self._update_displayed_texts()

        prev_matched_pattern = self._matched_pattern
        self._matched_pattern = pattern

        prev_statusline_sources = self._statusline_sources
        self._statusline_sources = ' '.join(statuses)

        if self._is_async:
            self._start_timer('update_candidates')
        else:
            self._stop_timer('update_candidates')

        updated = (self._displayed_texts != prev_displayed_texts or
                   self._matched_pattern != prev_matched_pattern or
                   self._statusline_sources != prev_statusline_sources)
        if updated:
            self._updated = True
            self._start_timer('update_buffer')

        if self._context['search'] and self._context['input']:
            self._vim.call('setreg', '/', self._context['input'])

        return self._updated

    def _update_displayed_texts(self) -> None:
        candidates_len = len(self._candidates)
        if not self._is_async and self._context['auto_resize']:
            winminheight = self._context['winminheight']
            max_height = min(self._context['winheight'],
                             self._get_max_height())
            if (winminheight != -1 and candidates_len < winminheight):
                self._winheight = winminheight
            elif candidates_len > max_height:
                self._winheight = max_height
            elif candidates_len != self._winheight:
                self._winheight = candidates_len

        max_source_name_len = 0
        if self._candidates:
            max_source_name_len = max([
                len(self._get_display_source_name(x['source_name']))
                for x in self._candidates])
        self._context['max_source_name_len'] = max_source_name_len
        self._context['max_source_name_format'] = (
            '{:<' + str(self._context['max_source_name_len']) + '}')
        self._displayed_texts = [
            self._get_candidate_display_text(i)
            for i in range(0, candidates_len)
        ]

    def _update_buffer(self) -> None:
        is_current_buffer = self._bufnr == self._vim.current.buffer.number

        self._update_status()

        if self._check_matchdelete and self._context['match_highlight']:
            matches = [x['id'] for x in
                       self._vim.call('getmatches', self._winid)]
            if self._matched_range_id in matches:
                self._vim.call('matchdelete',
                               self._matched_range_id, self._winid)
                self._matched_range_id = -1
            if self._matched_char_id in matches:
                self._vim.call('matchdelete',
                               self._matched_char_id, self._winid)
                self._matched_char_id = -1

            if self._matched_pattern != '':
                self._matched_range_id = self._vim.call(
                    'matchadd', 'deniteMatchedRange',
                    r'\c' + regex_convert_py_vim(self._matched_pattern),
                    10, -1, {'window': self._winid})
                matched_char_pattern = '[{}]'.format(re.sub(
                    r'([\[\]\\^-])', r'\\\1',
                    self._context['input'].replace(' ', '')
                ))
                self._matched_char_id = self._vim.call(
                    'matchadd', 'deniteMatchedChar', matched_char_pattern,
                    10, -1, {'window': self._winid})

        prev_linenr = self._vim.call('line', '.')
        prev_candidate = self._get_cursor_candidate()

        buffer = self._vim.buffers[self._bufnr]
        buffer.options['modifiable'] = True
        self._vim.vars['denite#_candidates'] = [
            x['word'] for x in self._candidates]
        buffer[:] = self._displayed_texts
        buffer.options['modifiable'] = False
        self._previous_text = self._context['input']
        self._resize_buffer(is_current_buffer)

        is_changed = (self._context['reversed'] or
                      (is_current_buffer and
                       self._previous_text != self._context['input']))
        if self._updated and is_changed:
            if not is_current_buffer:
                save_winid = self._vim.call('win_getid')
                self._vim.call('win_gotoid', self._winid)
            self._init_cursor()
            self._move_to_pos(self._cursor)
            if not is_current_buffer:
                self._vim.call('win_gotoid', save_winid)
        elif is_current_buffer:
            self._vim.call('cursor', [prev_linenr, 0])

        if is_current_buffer:
            if (self._context['auto_action'] and
                    prev_candidate != self._get_cursor_candidate()):
                self.do_action(self._context['auto_action'])

        self._updated = False
        self._stop_timer('update_buffer')

    def _update_status(self) -> None:
        inpt = ''
        if self._context['input']:
            inpt = self._context['input'] + ' '
        if self._context['error_messages']:
            inpt = '[ERROR] ' + inpt
        path = '[' + self._context['path'] + ']'

        status = {
            'input': inpt,
            'sources': self._statusline_sources,
            'path': path,
            # Extra
            'buffer_name': self._context['buffer_name'],
            'line_total': len(self._candidates),
        }
        if status == self._prev_status:
            return

        self._bufvars['denite_statusline'] = status
        self._prev_status = status

        linenr = "printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))"

        if self._context['statusline']:
            if self._floating or self._filter_floating:
                self._vim.options['titlestring'] = (
                    "%{denite#get_status('input')}%* " +
                    "%{denite#get_status('sources')} " +
                    " %{denite#get_status('path')}%*" +
                    "%{" + linenr + "}%*")
            else:
                winnr = self._vim.call('win_id2win', self._winid)
                self._vim.call('setwinvar', winnr, '&statusline', (
                    "%#deniteInput#%{denite#get_status('input')}%* " +
                    "%{denite#get_status('sources')} %=" +
                    "%#deniteStatusLinePath# %{denite#get_status('path')}%*" +
                    "%#deniteStatusLineNumber#%{" + linenr + "}%*"))

    def _get_display_source_name(self, name: str) -> str:
        source_names = self._context['source_names']
        if not self._is_multi or source_names == 'hide':
            source_name = ''
        else:
            short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\1', name)
                          if re.search(r'[^a-zA-Z]', name) else name[:2])
            source_name = short_name if source_names == 'short' else name
        return source_name

    def _get_candidate_display_text(self, index: int) -> str:
        source_names = self._context['source_names']
        candidate = self._candidates[index]
        terms = []
        if self._is_multi and source_names != 'hide':
            terms.append(self._context['max_source_name_format'].format(
                self._get_display_source_name(candidate['source_name'])))
        encoding = self._context['encoding']
        abbr = candidate.get('abbr', candidate['word']).encode(
            encoding, errors='replace').decode(encoding, errors='replace')
        terms.append(abbr[:int(self._context['max_candidate_width'])])
        return (str(self._context['selected_icon'])
                if index in self._selected_candidates
                else ' ') + ' '.join(terms).replace('\n', '')

    def _get_max_height(self) -> int:
        return int(self._vim.options['lines']) if not self._floating else (
            int(self._vim.options['lines']) -
            int(self._context['winrow']) -
            int(self._vim.options['cmdheight']))

    def _resize_buffer(self, is_current_buffer: bool) -> None:
        split = self._context['split']
        if (split == 'no' or split == 'tab' or
                self._vim.call('winnr', '$') == 1):
            return

        winheight = max(self._winheight, 1)
        winwidth = max(self._winwidth, 1)
        is_vertical = split == 'vertical'

        if not is_current_buffer:
            restore = self._vim.call('win_getid')
            self._vim.call('win_gotoid', self._winid)

        if not is_vertical and self._vim.current.window.height != winheight:
            if self._floating:
                wincol = self._context['winrow']
                row = wincol
                if split == 'floating':
                    if self._context['auto_resize'] and row > 1:
                        row += self._context['winheight']
                        row -= self._winheight
                    self._vim.call('nvim_win_set_config', self._winid, {
                        'relative': 'editor',
                        'row': row, 'col': self._context['wincol'],
                        'width': winwidth, 'height': winheight,
                    })
                    filter_row = 0 if wincol == 1 else row + winheight
                    filter_col = self._context['wincol']
                else:
                    init_pos = self._vim.call('nvim_win_get_config',
                                              self._winid)
                    self._vim.call('nvim_win_set_config', self._winid, {
                        'relative': 'win', 'win': init_pos['win'],
                        'row': init_pos['row'], 'col': init_pos['col'],
                        'width': winwidth, 'height': winheight,
                    })
                    filter_col = init_pos['col']
                    if init_pos['anchor'] == 'NW':
                        winpos = self._vim.call('nvim_win_get_position',
                                                self._winid)
                        filter_row = winpos[0] + winheight
                filter_winid = self._vim.vars['denite#_filter_winid']
                self._context['filter_winrow'] = row
                if self._vim.call('win_id2win', filter_winid) > 0:
                    self._vim.call('nvim_win_set_config', filter_winid, {
                        'relative': 'editor',
                        'row': filter_row, 'col': filter_col,
                    })

            self._vim.command('resize ' + str(winheight))
            if self._context['reversed']:
                self._vim.command('normal! zb')
        elif is_vertical and self._vim.current.window.width != winwidth:
            self._vim.command('vertical resize ' + str(winwidth))

        if not is_current_buffer:
            self._vim.call('win_gotoid', restore)

    def _check_do_option(self) -> bool:
        if self._context['do'] != '':
            self._do_command(self._context['do'])
            return True
        elif (self._candidates and self._context['immediately'] or
                len(self._candidates) == 1 and self._context['immediately_1']):
            self._do_immediately()
            return True
        return not (self._context['empty'] or
                    self._is_async or self._candidates)

    def _check_move_option(self) -> None:
        if self._context['cursor_pos'].isnumeric():
            self._cursor = int(self._context['cursor_pos']) + 1
        elif re.match(r'\+\d+', self._context['cursor_pos']):
            for _ in range(int(self._context['cursor_pos'][1:])):
                self._move_to_next_line()
        elif re.match(r'-\d+', self._context['cursor_pos']):
            for _ in range(int(self._context['cursor_pos'][1:])):
                self._move_to_prev_line()
        elif self._context['cursor_pos'] == '$':
            self._move_to_last_line()

    def _do_immediately(self) -> None:
        goto = self._winid > 0 and self._vim.call(
            'win_gotoid', self._winid)
        if goto:
            # Jump to denite window
            self._init_buffer()
        self.do_action('default')
        candidate = self._get_cursor_candidate()
        if not candidate:
            return
        echo(self._vim, 'Normal', '[{}/{}] {}'.format(
            self._cursor, len(self._candidates),
            candidate.get('abbr', candidate['word'])))
        if goto:
            # Move to the previous window
            self._vim.command('wincmd p')

    def _do_command(self, command: str) -> None:
        self._init_cursor()
        cursor = 1
        while cursor < len(self._candidates):
            self.do_action('default', command)
            self._move_to_next_line()
            cursor += 1
        self._quit_buffer()

    def _cleanup(self) -> None:
        self._stop_timer('update_candidates')
        self._stop_timer('update_buffer')

        if self._vim.current.buffer.number == self._bufnr:
            self._cursor = self._vim.call('line', '.')

        # Note: Close filter window before preview window
        self._vim.call('denite#filter#_close_filter_window')

        if not self._context['has_preview_window']:
            self._vim.command('pclose!')

        # Clear previewed buffers
        for bufnr in self._vim.vars['denite#_previewed_buffers'].keys():
            if not self._vim.call('win_findbuf', bufnr):
                self._vim.command('silent bdelete ' + str(bufnr))
        self._vim.vars['denite#_previewed_buffers'] = {}

        self._vim.command('highlight! link CursorLine CursorLine')

        if self._floating or self._filter_floating:
            self._vim.options['titlestring'] = self._titlestring
            self._vim.options['ruler'] = self._ruler

    def _close_current_window(self) -> None:
        if self._vim.call('winnr', '$') == 1:
            self._vim.command('buffer #')
        else:
            self._vim.command('close!')

    def _quit_buffer(self) -> None:
        self._cleanup()
        if self._vim.call('bufwinnr', self._bufnr) < 0:
            # Denite buffer is already closed
            return

        winids = self._vim.call('win_findbuf',
                                self._vim.vars['denite#_filter_bufnr'])
        if winids:
            # Quit filter buffer
            self._vim.call('win_gotoid', winids[0])
            self._close_current_window()
            # Move to denite window
            self._vim.call('win_gotoid', self._winid)

        # Restore the window
        if self._context['split'] == 'no':
            self._switch_prev_buffer()
            for k, v in self._save_window_options.items():
                self._vim.current.window.options[k] = v
        else:
            if self._context['split'] == 'tab':
                self._vim.command('tabclose!')

            if self._context['split'] != 'tab':
                self._close_current_window()

            self._vim.call('win_gotoid', self._prev_winid)

        # Restore the position
        self._vim.call('setpos', '.', self._prev_curpos)

        if self._get_wininfo() and self._get_wininfo() == self._prev_wininfo:
            # Note: execute restcmd twice to restore layout properly
            self._vim.command(self._winrestcmd)
            self._vim.command(self._winrestcmd)

        clearmatch(self._vim)

    def _get_cursor_candidate(self) -> Candidate:
        return self._get_candidate(self._cursor)

    def _get_candidate(self, pos: int) -> Candidate:
        if not self._candidates or pos > len(self._candidates):
            return {}
        return self._candidates[pos - 1]

    def _get_selected_candidates(self) -> Candidates:
        if not self._selected_candidates:
            return [self._get_cursor_candidate()
                    ] if self._get_cursor_candidate() else []
        return [self._candidates[x] for x in self._selected_candidates]

    def _move_to_pos(self, pos: int) -> None:
        self._vim.call('cursor', pos, 0)
        self._cursor = pos

        if self._context['reversed']:
            self._vim.command('normal! zb')

    def _move_to_next_line(self) -> None:
        if self._cursor < len(self._candidates):
            self._cursor += 1

    def _move_to_prev_line(self) -> None:
        if self._cursor >= 1:
            self._cursor -= 1

    def _move_to_first_line(self) -> None:
        self._cursor = 1

    def _move_to_last_line(self) -> None:
        self._cursor = len(self._candidates)

    def _start_timer(self, key: str) -> None:
        if key in self._timers:
            return

        if key == 'update_candidates':
            self._timers[key] = self._vim.call(
                'denite#helper#_start_update_candidates_timer', self._bufnr)
        elif key == 'update_buffer':
            self._timers[key] = self._vim.call(
                'denite#helper#_start_update_buffer_timer', self._bufnr)

    def _stop_timer(self, key: str) -> None:
        if key not in self._timers:
            return

        self._vim.call('timer_stop', self._timers[key])

        # Note: After timer_stop is called, self._timers may be removed
        if key in self._timers:
            self._timers.pop(key)

    def _split_floating(self, split: str) -> None:
        # Use floating window
        if split == 'floating':
            self._vim.call(
                'nvim_open_win', self._vim.call('bufnr', '%'), True, {
                    'relative': 'editor',
                    'row': self._context['winrow'],
                    'col': self._context['wincol'],
                    'width': self._context['winwidth'],
                    'height': self._context['winheight'],
                })
        elif split == 'floating_relative_cursor':
            opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] +
                          self._vim.call('winline') - 1)
            if self._context['auto_resize']:
                height = max(self._winheight, 1)
                width = max(self._winwidth, 1)
            else:
                width = self._context['winwidth']
                height = self._context['winheight']

            if opened_pos + height + 3 > self._vim.options['lines']:
                anchor = 'SW'
                row = 0
                self._context['filter_winrow'] = row + opened_pos
            else:
                anchor = 'NW'
                row = 1
                self._context['filter_winrow'] = row + height + opened_pos

            self._vim.call(
                'nvim_open_win', self._vim.call('bufnr', '%'), True, {
                    'relative': 'cursor',
                    'row': row, 'col': 0,
                    'width': width, 'height': height,
                    'anchor': anchor,
                })
        elif split == 'floating_relative_window':
            self._vim.call(
                'nvim_open_win', self._vim.call('bufnr', '%'), True, {
                    'relative': 'win',
                    'row': self._context['winrow'],
                    'col': self._context['wincol'],
                    'width': self._context['winwidth'],
                    'height': self._context['winheight'],
                })
regex_convert_py_vim from denite.util import Nvim, UserContext, Candidates,", "max(self._winheight, 1) width = max(self._winwidth, 1) else: width = self._context['winwidth'] height = self._context['winheight']", "while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None: self._stop_timer('update_candidates')", "bool: return self._is_async def __init__(self, vim: Nvim) -> None: self._vim = vim self._denite:", "int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow']) - int(self._vim.options['cmdheight'])) def _resize_buffer(self,", "if self._vim.current.buffer.number == self._bufnr: self._cursor = self._vim.call('line', '.') # Note: Close filter window", "the initialization update = ('immediately', 'immediately_1', 'cursor_pos', 'prev_winid', 'start_filter', 'quick_move') for key in", "for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split'] == 'tab':", "self._context['winrow'], 'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos", "prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer') if self._context['search']", "not candidates or not action_name: return self._prev_action = action_name action = self._denite.get_action( self._context,", "signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating:", "self._timers.pop(key) def _split_floating(self, split: str) -> None: # Use floating window if split", "else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight", "in self._timers: return if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif", "% ( self._get_direction(), vertical, command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt", "-1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]:", "self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']: self._vim.command('normal! 
zb') def _move_to_next_line(self) ->", "'' self._initialized = False self._winheight = 0 self._winwidth = 0 self._winminheight = -1", "= winminheight elif candidates_len > max_height: self._winheight = max_height elif candidates_len != self._winheight:", "buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed']", "' ' if self._context['error_messages']: inpt = '[ERROR] ' + inpt path = '['", "self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if self._updated", "'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', } for k in window_options: self._save_window_options[k]", "{} self._bufnr = -1 self._winid = -1 self._winrestcmd = '' self._initialized = False", "= False) -> None: if is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates =", "# Disable quit flag is_quit = False if not is_quit and is_manual: self._selected_candidates", "elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) -> None: goto = self._winid >", "0)[0] + self._vim.call('winline') - 1) if self._context['auto_resize']: height = max(self._winheight, 1) width =", "+ 'deniteSelectedLine Statement') if self._floating: self._vim.current.window.options['winhighlight'] = ( 'Normal:' + self._context['highlight_window_background'] ) self._vim.command(('syntax", "self._vim.command(command) if is_quit and post_action == 'open': # Re-open denite buffer prev_cursor =", "be removed if key in self._timers: self._timers.pop(key) def _split_floating(self, split: str) -> None:", "self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath# %{denite#get_status('path')}%*\"", "= self._context['split'] if (split == 'no' or split == 'tab' or self._vim.call('winnr', '$')", "self._initialized and context['resume'] if resume: # Skip the initialization update = ('immediately', 'immediately_1',", "= status self._prev_status = status linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']: if self._floating or", "context[key] self._check_move_option() if self._check_do_option(): return self._init_buffer() if context['refresh']: self.redraw() self._move_to_pos(self._cursor) else: if self._context", "or source_names == 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+', r'\\1', name)", "0 and self._vim.call( 'win_gotoid', self._winid) if goto: # Jump to denite window self._init_buffer()", "do_map(self, 'open_filter_buffer', []) def _init_buffer(self) -> None: self._prev_status = dict() self._displayed_texts = []", "-> bool: return self._is_async def __init__(self, vim: Nvim) -> None: self._vim = vim", "= '', is_manual: bool = False) -> None: if is_manual: candidates = self._get_selected_candidates()", "== 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'win', 'row': self._context['winrow'], 'col':", "self._cursor += 1 def _move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor -=", "{ 'input': inpt, 'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total': 
len(self._candidates),", "re.match(r'-\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def", "self._vim.command('enew') else: self._vim.command('buffer ' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore')", "= 'NW' row = 1 self._context['filter_winrow'] = row + height + opened_pos self._vim.call(", ")) self._matched_char_id = self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr =", "and self._vim.current.window.height != winheight: if self._floating: wincol = self._context['winrow'] row = wincol if", "self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer", "'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number', 'relativenumber', 'signcolumn', 'spell', 'winfixheight', 'wrap', } for", "self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str) -> None: if key not in", "self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def quit(self) -> None: if self._denite:", "self._cursor >= 1: self._cursor -= 1 def _move_to_first_line(self) -> None: self._cursor = 1", "} if status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status = status linenr", "_switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name == ''): self._vim.command('enew') else:", "-> None: candidates_len = len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight']", "self._context['winheight'] self._winwidth = self._context['winwidth'] def _gather_candidates(self) -> None: self._selected_candidates = [] if self._denite:", "'win_gotoid', self._winid) if goto: # Jump to denite window self._init_buffer() self.do_action('default') candidate =", "row = 1 self._context['filter_winrow'] = row + height + opened_pos self._vim.call( 'nvim_open_win', self._vim.call('bufnr',", "self._vim.command('silent bdelete ' + str(bufnr)) self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight! link CursorLine CursorLine') if", "= self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor): # Restore", "denite#call_map(\"auto_action\")') self._init_syntax() def _switch_buffer(self) -> None: split = self._context['split'] if (split != 'no'", "is_fit = not [x for x in self._displayed_texts if self._vim.call('strwidth', x) > winwidth]", "1 def _move_to_prev_line(self) -> None: if self._cursor >= 1: self._cursor -= 1 def", "self._init_cursor() cursor = 1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def", "else: anchor = 'NW' row = 1 self._context['filter_winrow'] = row + height +", "if self._context['reversed']: self._vim.command('normal! 
zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize '", "!= prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated", "self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and candidates_len < winminheight):", "self._context['input'].replace(' ', '') )) self._matched_char_id = self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1, {'window':", "self._bufnr) def _stop_timer(self, key: str) -> None: if key not in self._timers: return", "= 'nofile' options['bufhidden'] = 'delete' options['swapfile'] = False options['buflisted'] = False options['modeline'] =", "split == 'vertical' else '' command = ( 'buffer' if split in ['no',", "self._context['split'] if (split != 'no' and self._winid > 0 and self._vim.call('win_gotoid', self._winid)): if", "self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col': 0, 'width':", "self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if (self._prev_bufnr == self._bufnr or self._vim.buffers[self._prev_bufnr].name ==", "abbr = candidate.get('abbr', candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in", "self._context['reversed']: self._vim.command('normal! zb') def _move_to_next_line(self) -> None: if self._cursor < len(self._candidates): self._cursor +=", "self._vim.call('bufnr', '%'), True, { 'relative': 'cursor', 'row': row, 'col': 0, 'width': width, 'height':", "_get_direction(self) -> str: direction = str(self._context['direction']) if direction == 'dynamictop' or direction ==", "'path': path, # Extra 'buffer_name': self._context['buffer_name'], 'line_total': len(self._candidates), } if status == self._prev_status:", "in self._timers: return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called, self._timers may", "+ self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link '", "elif split != 'no': command = self._get_direction() command += ' vsplit' if split", "in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else 'sbuffer') self._vim.command( 'silent keepalt %s %s", "+ 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif re.match(r'-\\d+', self._context['cursor_pos']):", "' + str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link", "= vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] = [] self._candidates: Candidates =", "'': self._do_command(self._context['do']) return True elif (self._candidates and self._context['immediately'] or len(self._candidates) == 1 and", "{} self._sources_history: typing.List[typing.Any] = [] self._previous_text = '' self._floating = False self._filter_floating =", "!= '': self._vim.command(command) if is_quit and post_action == 'open': # Re-open denite buffer", "winminheight elif candidates_len > max_height: self._winheight = max_height elif candidates_len != self._winheight: 
self._winheight", "range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) -> None: goto =", "command = self._get_direction() command += ' vsplit' if split == 'vertical' else '", "-> None: if key in self._timers: return if key == 'update_candidates': self._timers[key] =", "and candidates_len < winminheight): self._winheight = winminheight elif candidates_len > max_height: self._winheight =", "']' status = { 'input': inpt, 'sources': self._statusline_sources, 'path': path, # Extra 'buffer_name':", "{} window_options = { 'colorcolumn', 'concealcursor', 'conceallevel', 'cursorcolumn', 'cursorline', 'foldcolumn', 'foldenable', 'list', 'number',", "to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate: return echo(self._vim,", "'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else: if self._context['split']", "'}') self._displayed_texts = [ self._get_candidate_display_text(i) for i in range(0, candidates_len) ] def _update_buffer(self)", "# ============================================================================ # FILE: default.py # AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License:", "self._winid = -1 self._winrestcmd = '' self._initialized = False self._winheight = 0 self._winwidth", "self._displayed_texts = [] self._prev_bufnr = self._vim.current.buffer.number self._prev_curpos = self._vim.call('getcurpos') self._prev_wininfo = self._get_wininfo() self._prev_winid", "= min(self._context['winheight'], self._get_max_height()) if (winminheight != -1 and candidates_len < winminheight): self._winheight =", "not self._is_multi or source_names == 'hide': source_name = '' else: short_name = (re.sub(r'([a-zA-Z])[a-zA-Z]+',", "command = 'edit' if split == 'tab': self._vim.command('tabnew') elif self._floating: self._split_floating(split) elif self._context['filter_split_direction']", "self._denite.init_syntax(self._context, self._is_multi) def _update_candidates(self) -> bool: if not self._denite: return False [self._is_async, pattern,", "\"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \" +", "is_manual: bool = False) -> None: if is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate():", "============================================================================ import re import typing from denite.util import echo, error, clearmatch, regex_convert_py_vim from", "if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def", "self._result: typing.List[typing.Any] = [] self._context: UserContext = {} self._bufnr = -1 self._winid =", "self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None: if not", "def _restart(self) -> None: self._context['input'] = '' self._quit_buffer() self._init_denite() self._gather_candidates() self._init_buffer() self._update_candidates() self._update_buffer()", "if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite", "= 
self._vim.call('bufadd', bufname) vertical = 'vertical' if split == 'vertical' else '' command", "winwidth] if direction == 'dynamictop': direction = 'aboveleft' if is_fit else 'topleft' else:", "'update_buffer': self._timers[key] = self._vim.call( 'denite#helper#_start_update_buffer_timer', self._bufnr) def _stop_timer(self, key: str) -> None: if", "pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern", "buffer prev_cursor = self._cursor cursor_candidate = self._get_cursor_candidate() self._init_buffer() self.redraw(False) if cursor_candidate == self._get_candidate(prev_cursor):", "'vertical' if not is_current_buffer: restore = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) if not is_vertical and", "_ in range(int(self._context['cursor_pos'][1:])): self._move_to_prev_line() elif self._context['cursor_pos'] == '$': self._move_to_last_line() def _do_immediately(self) -> None:", "_gather_candidates(self) -> None: self._selected_candidates = [] if self._denite: self._denite.gather_candidates(self._context) def _init_cursor(self) -> None:", "if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def quit(self) -> None: if", "= self._vim.current.window.height self._winwidth = self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'],", "False [self._is_async, pattern, statuses, self._entire_len, self._candidates] = self._denite.filter_candidates(self._context) prev_displayed_texts = self._displayed_texts self._update_displayed_texts() prev_matched_pattern", "'deniteMatchedChar', matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate()", "self._context['filter_split_direction'] == 'floating': self._filter_floating = True elif split != 'no': command = self._get_direction()", "self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position',", "filter_row = winpos[0] + winheight filter_winid = self._vim.vars['denite#_filter_winid'] self._context['filter_winrow'] = row if self._vim.call('win_id2win',", "self._winid) # Restore the window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v", "typing.Any] = {} self._sources_history: typing.List[typing.Any] = [] self._previous_text = '' self._floating = False", "[x['id'] for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in matches: self._vim.call('matchdelete', self._matched_range_id, self._winid)", "import SyncParent class Default(object): @property def is_async(self) -> bool: return self._is_async def __init__(self,", "# In Vim8, FileType autocmd is not fired after set filetype option. 
self._vim.command('silent", "'deniteMatchedRange', r'\\c' + regex_convert_py_vim(self._matched_pattern), 10, -1, {'window': self._winid}) matched_char_pattern = '[{}]'.format(re.sub( r'([\\[\\]\\\\^-])', r'\\\\\\1',", "> 0: self._vim.call('nvim_win_set_config', filter_winid, { 'relative': 'editor', 'row': filter_row, 'col': filter_col, }) self._vim.command('resize", "= status linenr = \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] =", "0) is_fit = not [x for x in self._displayed_texts if self._vim.call('strwidth', x) >", "typing.List[typing.Any]: return [ self._vim.options['columns'], self._vim.options['lines'], self._vim.call('win_getid'), self._vim.call('tabpagebuflist') ] def _switch_prev_buffer(self) -> None: if", "'col': self._context['wincol'], 'width': self._context['winwidth'], 'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos =", "self._matched_range_id = -1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources:", "contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/' + ' conceal contained')", "self._selected_candidates else ' ') + ' '.join(terms).replace('\\n', '') def _get_max_height(self) -> int: return", "return self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return [self._get_cursor_candidate()", "1: self._cursor -= 1 def _move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self)", "'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite: #", "not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'], self._get_max_height()) if (winminheight", "str(winheight)) if self._context['reversed']: self._vim.command('normal! 
zb') elif is_vertical and self._vim.current.window.width != winwidth: self._vim.command('vertical resize", "= self._vim.current.window.width self._bufvars = self._vim.current.buffer.vars self._bufvars['denite'] = { 'buffer_name': self._context['buffer_name'], } self._bufvars['denite_statusline'] =", "None: if is_manual: candidates = self._get_selected_candidates() elif self._get_cursor_candidate(): candidates = [self._get_cursor_candidate()] else: candidates", "Disable ruler self._vim.options['ruler'] = False options['buftype'] = 'nofile' options['bufhidden'] = 'delete' options['swapfile'] =", "= self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif is_current_buffer:", "\"%{denite#get_status('sources')} \" + \" %{denite#get_status('path')}%*\" + \"%{\" + linenr + \"}%*\") else: winnr", "self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use setlocal instead of \"current.window.options\" #", "= wincol if split == 'floating': if self._context['auto_resize'] and row > 1: row", "1 while cursor < len(self._candidates): self.do_action('default', command) self._move_to_next_line() self._quit_buffer() def _cleanup(self) -> None:", "linenr + \"}%*\") else: winnr = self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%*", "# Jump to denite window self._init_buffer() self.do_action('default') candidate = self._get_cursor_candidate() if not candidate:", "window if self._context['split'] == 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] =", "self._bufnr == self._vim.current.buffer.number self._update_status() if self._check_matchdelete and self._context['match_highlight']: matches = [x['id'] for x", "is_vertical and self._vim.current.window.height != winheight: if self._floating: wincol = self._context['winrow'] row = wincol", "self._vim.vars['denite#_previewed_buffers'] = {} self._vim.command('highlight! link CursorLine CursorLine') if self._floating or self._filter_floating: self._vim.options['titlestring'] =", "'' command = ( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor']", "= self._context['input'] self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if", "(is_current_buffer and self._previous_text != self._context['input'])) if self._updated and is_changed: if not is_current_buffer: save_winid", "sources') return self._init_denite() self._gather_candidates() self._update_candidates() self._init_cursor() self._check_move_option() if self._check_do_option(): return self._init_buffer() self._update_displayed_texts() self._update_buffer()", "filter_col, }) self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal! 
zb') elif is_vertical and", "to use setlocal instead of \"current.window.options\" # \"current.window.options\" changes global value instead of", "if key == 'update_candidates': self._timers[key] = self._vim.call( 'denite#helper#_start_update_candidates_timer', self._bufnr) elif key == 'update_buffer':", "self._move_to_pos(self._cursor) else: if self._context != context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] =", "self._move_to_pos(self._cursor) if self._context['quick_move'] and do_map(self, 'quick_move', []): return if self._context['start_filter']: do_map(self, 'open_filter_buffer', [])", "= \"printf('%'.(len(line('$'))+2).'d/%d',line('.'),line('$'))\" if self._context['statusline']: if self._floating or self._filter_floating: self._vim.options['titlestring'] = ( \"%{denite#get_status('input')}%* \"", "window self._vim.command('wincmd p') def _do_command(self, command: str) -> None: self._init_cursor() cursor = 1", "winfixheight') self._vim.command('setlocal nowrap') if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal", "self._quit_buffer() self._result = [] return def _restart(self) -> None: self._context['input'] = '' self._quit_buffer()", "= self._context['post_action'] is_quit = action['is_quit'] or post_action == 'quit' if is_quit: self.quit() self._denite.do_action(self._context,", "Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid)", "else: if self._context['split'] == 'tab': self._vim.command('tabclose!') if self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid)", "init_pos['anchor'] == 'NW': winpos = self._vim.call('nvim_win_get_position', self._winid) filter_row = winpos[0] + winheight filter_winid", "Nvim) -> None: self._vim = vim self._denite: typing.Optional[SyncParent] = None self._selected_candidates: typing.List[int] =", "self._vim.call('winrestcmd') self._ruler = self._vim.options['ruler'] self._switch_buffer() self._bufnr = self._vim.current.buffer.number self._winid = self._vim.call('win_getid') self._resize_buffer(True) self._winheight", "self._resize_buffer(is_current_buffer) is_changed = (self._context['reversed'] or (is_current_buffer and self._previous_text != self._context['input'])) if self._updated and", "= len(self._candidates) if not self._is_async and self._context['auto_resize']: winminheight = self._context['winminheight'] max_height = min(self._context['winheight'],", "index: int) -> str: source_names = self._context['source_names'] candidate = self._candidates[index] terms = []", "return source_name def _get_candidate_display_text(self, index: int) -> str: source_names = self._context['source_names'] candidate =", "'relative': 'cursor', 'row': row, 'col': 0, 'width': width, 'height': height, 'anchor': anchor, })", "self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called, self._timers may be removed if", "self._init_buffer() self._update_candidates() self._update_buffer() def _start_sources_queue(self, context: UserContext) -> None: if not context['sources_queue']: return", "clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self, pos: int) -> Candidate:", "= str(self._context['direction']) if 
direction == 'dynamictop' or direction == 'dynamicbottom': self._update_displayed_texts() winwidth =", "= False if self._vim.current.buffer.options['filetype'] != 'denite': self._titlestring = self._vim.options['titlestring'] command = 'edit' if", "def _do_immediately(self) -> None: goto = self._winid > 0 and self._vim.call( 'win_gotoid', self._winid)", "self._candidates or pos > len(self._candidates): return {} return self._candidates[pos - 1] def _get_selected_candidates(self)", "}) elif split == 'floating_relative_window': self._vim.call( 'nvim_open_win', self._vim.call('bufnr', '%'), True, { 'relative': 'win',", ") self._vim.command(('syntax match deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match", "/^[ %s]/' + ' conceal contained') % ( self._context['selected_icon'])) if self._denite: self._denite.init_syntax(self._context, self._is_multi)", "sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim, 'run_coroutine'):", "def _start(self, sources: typing.List[typing.Any], context: UserContext) -> None: from denite.ui.map import do_map self._vim.command('silent!", "self._matched_pattern = '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring = ''", "[] context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result def do_action(self, action_name: str, command: str", "command += ' vsplit' if split == 'vertical' else ' split' bufname =", "_move_to_pos(self, pos: int) -> None: self._vim.call('cursor', pos, 0) self._cursor = pos if self._context['reversed']:", "'col': filter_col, }) self._vim.command('resize ' + str(winheight)) if self._context['reversed']: self._vim.command('normal! 
zb') elif is_vertical", "_get_max_height(self) -> int: return int(self._vim.options['lines']) if not self._floating else ( int(self._vim.options['lines']) - int(self._context['winrow'])", "'sbuffer') self._vim.command( 'silent keepalt %s %s %s %s' % ( self._get_direction(), vertical, command,", "deniteSelectedLine /^[%s].*/' + ' contains=deniteConcealedMark') % ( self._context['selected_icon'])) self._vim.command(('syntax match deniteConcealedMark /^[ %s]/'", "import typing from denite.util import echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim,", "License: MIT license # ============================================================================ import re import typing from denite.util import echo,", "len(self._candidates) def _start_timer(self, key: str) -> None: if key in self._timers: return if", "self._is_async def __init__(self, vim: Nvim) -> None: self._vim = vim self._denite: typing.Optional[SyncParent] =", "layout properly self._vim.command(self._winrestcmd) self._vim.command(self._winrestcmd) clearmatch(self._vim) def _get_cursor_candidate(self) -> Candidate: return self._get_candidate(self._cursor) def _get_candidate(self,", "'vertical' and not self._floating: # Move the window to bottom self._vim.command('wincmd J') self._winrestcmd", "_init_cursor(self) -> None: if self._context['reversed']: self._move_to_last_line() else: self._move_to_first_line() def _move_to_pos(self, pos: int) ->", "self._get_wininfo() == self._prev_wininfo: # Note: execute restcmd twice to restore layout properly self._vim.command(self._winrestcmd)", "self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if", "line window. 
return resume = self._initialized and context['resume'] if resume: # Skip the", "= max_height elif candidates_len != self._winheight: self._winheight = candidates_len max_source_name_len = 0 if", "r'\\1', name) if re.search(r'[^a-zA-Z]', name) else name[:2]) source_name = short_name if source_names ==", "self._filter_floating = False self._updated = False self._timers: typing.Dict[str, int] = {} self._matched_range_id =", "prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources = ' '.join(statuses)", "self._winheight = 0 self._winwidth = 0 self._winminheight = -1 self._is_multi = False self._is_async", "x['word'] for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text =", "_init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg') self._vim.command('highlight link", "errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else ' ') +", "self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options if self._floating: # Disable ruler self._vim.options['ruler'] =", "_move_to_first_line(self) -> None: self._cursor = 1 def _move_to_last_line(self) -> None: self._cursor = len(self._candidates)", "_check_do_option(self) -> bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif (self._candidates and", "self._context['highlight_matched_range']) self._vim.command('highlight link deniteMatchedChar ' + self._context['highlight_matched_char']) self._vim.command('highlight default link ' + 'deniteStatusLinePath", "candidates_len > max_height: self._winheight = max_height elif candidates_len != self._winheight: self._winheight = candidates_len", "return self._vim.call('timer_stop', self._timers[key]) # Note: After timer_stop is called, self._timers may be removed", "False self._is_multi = len(sources) > 1 if not sources: # Ignore empty sources.", "filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window() # Move to denite window self._vim.call('win_gotoid', self._winid) #", "self._vim.call('win_id2win', self._winid) self._vim.call('setwinvar', winnr, '&statusline', ( \"%#deniteInput#%{denite#get_status('input')}%* \" + \"%{denite#get_status('sources')} %=\" + \"%#deniteStatusLinePath#", "= False self._timers: typing.Dict[str, int] = {} self._matched_range_id = -1 self._matched_char_id = -1", "self._get_direction() command += ' vsplit' if split == 'vertical' else ' split' bufname", "else: candidates = [] if not self._denite or not candidates or not action_name:", "for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] # Note: Have to use setlocal", "= max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical = split == 'vertical' if", "if is_force: self._gather_candidates() if self._update_candidates(): self._update_buffer() else: self._update_status() self._context['is_redraw'] = False def quit(self)", "'signcolumn', 'spell', 'winfixheight', 'wrap', } for k in window_options: self._save_window_options[k] = self._vim.current.window.options[k] #", "if not is_quit and is_manual: self._selected_candidates = [] self.redraw(action['is_redraw']) if is_manual and 
self._context['sources_queue']:", "self._vim.current.window.options[k] # Note: Have to use setlocal instead of \"current.window.options\" # \"current.window.options\" changes", "= '' self._context['quick_move'] = '' self._start_sources_queue(self._context) return def redraw(self, is_force: bool = True)", "= '' self._displayed_texts: typing.List[str] = [] self._statusline_sources = '' self._titlestring = '' self._ruler", "command = ( 'buffer' if split in ['no', 'tab', 'floating', 'floating_relative_window', 'floating_relative_cursor'] else", "'height': self._context['winheight'], }) elif split == 'floating_relative_cursor': opened_pos = (self._vim.call('nvim_win_get_position', 0)[0] + self._vim.call('winline')", "Vim8, FileType autocmd is not fired after set filetype option. self._vim.command('silent doautocmd FileType", "if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def _restart(self) -> None: self._context['input']", "self._vim.command('setlocal nolist') self._vim.command('setlocal nonumber') self._vim.command('setlocal norelativenumber') self._vim.command('setlocal nospell') self._vim.command('setlocal winfixheight') self._vim.command('setlocal nowrap') if", "' if self._context['error_messages']: inpt = '[ERROR] ' + inpt path = '[' +", "redraw(self, is_force: bool = True) -> None: self._context['is_redraw'] = is_force if is_force: self._gather_candidates()", "denite.parent import SyncParent class Default(object): @property def is_async(self) -> bool: return self._is_async def", "for x in self._candidates] buffer[:] = self._displayed_texts buffer.options['modifiable'] = False self._previous_text = self._context['input']", "[] self._statusline_sources = '' self._titlestring = '' self._ruler = False self._prev_action = ''", "'' self._prev_status: typing.Dict[str, typing.Any] = {} self._prev_curpos: typing.List[typing.Any] = [] self._save_window_options: typing.Dict[str, typing.Any]", "= -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any], context: UserContext) ->", "self._context['split'] if (split == 'no' or split == 'tab' or self._vim.call('winnr', '$') ==", "command, bufnr, ) ) else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self)", "candidate['word']).encode( encoding, errors='replace').decode(encoding, errors='replace') terms.append(abbr[:int(self._context['max_candidate_width'])]) return (str(self._context['selected_icon']) if index in self._selected_candidates else '", "else: if self._context != context: self._context.clear() self._context.update(context) self._context['sources'] = sources self._context['is_redraw'] = False", "self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options = self._vim.current.buffer.options", "self._denite = SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources] self._start_sources_queue(context) return self._result def", "save_winid = self._vim.call('win_getid') self._vim.call('win_gotoid', self._winid) self._init_cursor() self._move_to_pos(self._cursor) if not is_current_buffer: self._vim.call('win_gotoid', save_winid) elif", "global value instead of local in # neovim. 
self._vim.command('setlocal colorcolumn=') self._vim.command('setlocal conceallevel=3') self._vim.command('setlocal", "action_name, candidates) if not action: return post_action = self._context['post_action'] is_quit = action['is_quit'] or", "= {} self._bufnr = -1 self._winid = -1 self._winrestcmd = '' self._initialized =", "'short' else name return source_name def _get_candidate_display_text(self, index: int) -> str: source_names =", "def _check_do_option(self) -> bool: if self._context['do'] != '': self._do_command(self._context['do']) return True elif (self._candidates", "= -1 if self._matched_char_id in matches: self._vim.call('matchdelete', self._matched_char_id, self._winid) self._matched_char_id = -1 if", "self._candidates[pos - 1] def _get_selected_candidates(self) -> Candidates: if not self._selected_candidates: return [self._get_cursor_candidate() ]", "self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts !=", "= '[ERROR] ' + inpt path = '[' + self._context['path'] + ']' status", "len(self._candidates), } if status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status = status", "echo, error, clearmatch, regex_convert_py_vim from denite.util import Nvim, UserContext, Candidates, Candidate from denite.parent", "self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line()", "'line_total': len(self._candidates), } if status == self._prev_status: return self._bufvars['denite_statusline'] = status self._prev_status =", "'[{}/{}] {}'.format( self._cursor, len(self._candidates), candidate.get('abbr', candidate['word']))) if goto: # Move to the previous", "row = 0 self._context['filter_winrow'] = row + opened_pos else: anchor = 'NW' row", "int) -> str: source_names = self._context['source_names'] candidate = self._candidates[index] terms = [] if", "# FILE: default.py # AUTHOR: <NAME> <<EMAIL> at g<EMAIL>> # License: MIT license", "self._updated = True self._start_timer('update_buffer') if self._context['search'] and self._context['input']: self._vim.call('setreg', '/', self._context['input']) return self._updated", "self._vim.command('close!') def _quit_buffer(self) -> None: self._cleanup() if self._vim.call('bufwinnr', self._bufnr) < 0: # Denite", "0 self._entire_len = 0 self._result: typing.List[typing.Any] = [] self._context: UserContext = {} self._bufnr", "self._context['split'] == 'no': self._switch_prev_buffer() for k, v in self._save_window_options.items(): self._vim.current.window.options[k] = v else:", "prev_linenr = self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True", "self._displayed_texts self._update_displayed_texts() prev_matched_pattern = self._matched_pattern self._matched_pattern = pattern prev_statusline_sources = self._statusline_sources self._statusline_sources =", "str(self._prev_bufnr)) def _init_syntax(self) -> None: self._vim.command('syntax case ignore') self._vim.command('highlight default link deniteInput ModeMsg')", "and self._context['match_highlight']: matches = [x['id'] for x in self._vim.call('getmatches', self._winid)] if self._matched_range_id in", "= SyncParent(self._vim) self._result = [] context['sources_queue'] = [sources] self._start_sources_queue(context) return 
self._result def do_action(self,", "= int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])): self._move_to_next_line() elif", "self._statusline_sources self._statusline_sources = ' '.join(statuses) if self._is_async: self._start_timer('update_candidates') else: self._stop_timer('update_candidates') updated = (self._displayed_texts", "-1 self._winid = -1 self._winrestcmd = '' self._initialized = False self._winheight = 0", "= self._vim.call( 'matchadd', 'deniteMatchedChar', matched_char_pattern, 10, -1, {'window': self._winid}) prev_linenr = self._vim.call('line', '.')", "self._context['cursor_pos'].isnumeric(): self._cursor = int(self._context['cursor_pos']) + 1 elif re.match(r'\\+\\d+', self._context['cursor_pos']): for _ in range(int(self._context['cursor_pos'][1:])):", "split == 'tab' or self._vim.call('winnr', '$') == 1): return winheight = max(self._winheight, 1)", "autocmd! denite') if re.search(r'\\[Command Line\\]$', self._vim.current.buffer.name): # Ignore command line window. return resume", "!= prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated = True self._start_timer('update_buffer') if", "key: str) -> None: if key in self._timers: return if key == 'update_candidates':", "is_quit: self.quit() self._denite.do_action(self._context, action_name, candidates) self._result = candidates if command != '': self._vim.command(command)", "start(self, sources: typing.List[typing.Any], context: UserContext) -> typing.List[typing.Any]: if not self._denite: # if hasattr(self._vim,", "_split_floating(self, split: str) -> None: # Use floating window if split == 'floating':", "= -1 self._is_multi = False self._is_async = False self._matched_pattern = '' self._displayed_texts: typing.List[str]", "self._vim.call('line', '.') prev_candidate = self._get_cursor_candidate() buffer = self._vim.buffers[self._bufnr] buffer.options['modifiable'] = True self._vim.vars['denite#_candidates'] =", "def quit(self) -> None: if self._denite: self._denite.on_close(self._context) self._quit_buffer() self._result = [] return def", "1): return winheight = max(self._winheight, 1) winwidth = max(self._winwidth, 1) is_vertical = split", ") else: self._vim.call( 'denite#util#execute_path', f'silent keepalt {command}', bufname) def _get_direction(self) -> str: direction", "self._matched_char_id, self._winid) self._matched_char_id = -1 if self._matched_pattern != '': self._matched_range_id = self._vim.call( 'matchadd',", "= -1 self._matched_char_id = -1 self._check_matchdelete = bool(self._vim.call( 'denite#util#check_matchdelete')) def start(self, sources: typing.List[typing.Any],", "bottom self._vim.command('wincmd J') self._winrestcmd = '' return self._floating = split in [ 'floating',", "prev_displayed_texts or self._matched_pattern != prev_matched_pattern or self._statusline_sources != prev_statusline_sources) if updated: self._updated =", "'row': row, 'col': 0, 'width': width, 'height': height, 'anchor': anchor, }) elif split", "self._vim.call('nvim_win_get_config', self._winid) self._vim.call('nvim_win_set_config', self._winid, { 'relative': 'win', 'win': init_pos['win'], 'row': init_pos['row'], 'col': init_pos['col'],", "# Note: After timer_stop is called, self._timers may be removed if key in", "wincol if split == 'floating': if self._context['auto_resize'] and row > 1: row +=", "if self._cursor < len(self._candidates): self._cursor += 1 def _move_to_prev_line(self) -> 
None: if self._cursor", "if self._context['prompt']: self._vim.command('setlocal signcolumn=yes') else: self._vim.command('setlocal signcolumn=auto') if self._context['cursorline']: self._vim.command('setlocal cursorline') options =", "if cursor_candidate == self._get_candidate(prev_cursor): # Restore the cursor self._move_to_pos(prev_cursor) # Disable quit flag", "link ' + 'deniteStatusLinePath Comment') self._vim.command('highlight default link ' + 'deniteStatusLineNumber LineNR') self._vim.command('highlight", "self._context['split'] != 'tab': self._close_current_window() self._vim.call('win_gotoid', self._prev_winid) # Restore the position self._vim.call('setpos', '.', self._prev_curpos)", "winids = self._vim.call('win_findbuf', self._vim.vars['denite#_filter_bufnr']) if winids: # Quit filter buffer self._vim.call('win_gotoid', winids[0]) self._close_current_window()", "wincol == 1 else row + winheight filter_col = self._context['wincol'] else: init_pos =" ]
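The denite fragments above repeatedly exercise Neovim's nvim_open_win API (the floating-window branches pass 'relative', 'row', 'col', 'width', 'height', and 'anchor' keys). A minimal, self-contained sketch of that pattern using pynvim is given below; the socket path, scratch-buffer contents, and geometry are illustrative assumptions, not values taken from denite itself.

# Minimal sketch: open a floating window via the nvim_open_win API,
# mirroring the config keys seen in the fragments above.
# The socket path and geometry are assumptions for illustration only.
import pynvim

nvim = pynvim.attach('socket', path='/tmp/nvim.sock')  # assumed running Neovim instance
buf = nvim.api.create_buf(False, True)                 # unlisted scratch buffer
nvim.api.buf_set_lines(buf, 0, -1, True, ['candidate 1', 'candidate 2'])
win = nvim.api.open_win(buf, True, {
    'relative': 'editor',   # position relative to the whole editor grid
    'row': 1,
    'col': 1,
    'width': 60,
    'height': 10,
    'anchor': 'NW',
})
nvim.api.win_set_option(win, 'winhighlight', 'Normal:NormalFloat')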
[ "= None try: if stdchannel is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno())", "try: if stdchannel is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file =", "= open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if", "contextlib import functools from io import UnsupportedOperation import os import sys __all__ =", "import functools from io import UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\",", "= os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield", "from io import UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager", "None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno())", "io import UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def", "not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected,", "None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp = functools.partial(_stdchannel_redirected,", "if dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected,", "is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp", "yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield", "if stdchannel is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename,", "temporarily redirect stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel =", "sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context", "for (mostly) internal use\"\"\" import contextlib import functools from io import UnsupportedOperation import", "iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not", "yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel,", "os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None])", "except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno())", "implemented for (mostly) internal use\"\"\" import contextlib import functools from io import UnsupportedOperation", "stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if 
oldstdchannel is not None:", "managers implemented for (mostly) internal use\"\"\" import contextlib import functools from io import", "dest_file = None try: if stdchannel is None: yield iter([None]) else: oldstdchannel =", "None dest_file = None try: if stdchannel is None: yield iter([None]) else: oldstdchannel", "to temporarily redirect stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel", "is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout =", "2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None try: if stdchannel is", "finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None:", "os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr", "else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation,", "import contextlib import functools from io import UnsupportedOperation import os import sys __all__", "None try: if stdchannel is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file", "= None dest_file = None try: if stdchannel is None: yield iter([None]) else:", "(http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None try: if stdchannel is None:", "AttributeError): yield iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file", "(UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if", "@contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily redirect stdout", "dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp = functools.partial(_stdchannel_redirected, None,", "context manager to temporarily redirect stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/)", "Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None try:", "stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None", "use\"\"\" import contextlib import functools from io import UnsupportedOperation import os import sys", "utf-8 -*- \"\"\"Context managers implemented for (mostly) internal use\"\"\" import contextlib import functools", "mode=\"w\"): \"\"\" A context manager to temporarily redirect stdout or stderr Originally by", "redirect stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None", "\"\"\" oldstdchannel = None dest_file = None try: if 
stdchannel is None: yield", "UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename,", "-*- \"\"\"Context managers implemented for (mostly) internal use\"\"\" import contextlib import functools from", "oldstdchannel = None dest_file = None try: if stdchannel is None: yield iter([None])", "[\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily", "dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr)", "open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel", "stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr =", "not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp =", "RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout) RedirectStderr = functools.partial(_stdchannel_redirected, sys.stderr) RedirectNoOp = functools.partial(_stdchannel_redirected, None, \"\")", "def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily redirect stdout or", "or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file =", "stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file", "import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A", "\"\"\"Context managers implemented for (mostly) internal use\"\"\" import contextlib import functools from io", "(mostly) internal use\"\"\" import contextlib import functools from io import UnsupportedOperation import os", "<NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None try: if stdchannel", "oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError):", "stdchannel is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode)", "coding: utf-8 -*- \"\"\"Context managers implemented for (mostly) internal use\"\"\" import contextlib import", "-*- coding: utf-8 -*- \"\"\"Context managers implemented for (mostly) internal use\"\"\" import contextlib", "A context manager to temporarily redirect stdout or stderr Originally by <NAME>, 2013", "None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout = functools.partial(_stdchannel_redirected, sys.stdout)", "yield iter([None]) finally: if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is", "\"RedirectStderr\"] 
@contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily redirect", "\"\"\" A context manager to temporarily redirect stdout or stderr Originally by <NAME>,", "# -*- coding: utf-8 -*- \"\"\"Context managers implemented for (mostly) internal use\"\"\" import", "oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close() RedirectStdout", "_stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily redirect stdout or stderr", "manager to temporarily redirect stdout or stderr Originally by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\"", "<filename>PyDSTool/core/context_managers.py # -*- coding: utf-8 -*- \"\"\"Context managers implemented for (mostly) internal use\"\"\"", "by <NAME>, 2013 (http://marc-abramowitz.com/archives/2013/07/19/python-context-manager-for-redirected-stdout-and-stderr/) \"\"\" oldstdchannel = None dest_file = None try: if", "iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except", "= [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager to", "dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally:", "os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\"", "import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"):", "os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is not", "mode) os.dup2(dest_file.fileno(), stdchannel.fileno()) yield except (UnsupportedOperation, AttributeError): yield iter([None]) finally: if oldstdchannel is", "__all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel, dest_filename, mode=\"w\"): \"\"\" A context manager", "import UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"] @contextlib.contextmanager def _stdchannel_redirected(stdchannel,", "functools from io import UnsupportedOperation import os import sys __all__ = [\"RedirectStdout\", \"RedirectStderr\"]", "dest_filename, mode=\"w\"): \"\"\" A context manager to temporarily redirect stdout or stderr Originally", "is None: yield iter([None]) else: oldstdchannel = os.dup(stdchannel.fileno()) dest_file = open(dest_filename, mode) os.dup2(dest_file.fileno(),", "internal use\"\"\" import contextlib import functools from io import UnsupportedOperation import os import", "if oldstdchannel is not None: os.dup2(oldstdchannel, stdchannel.fileno()) if dest_file is not None: dest_file.close()" ]
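if __name__ == "__main__":
    # Minimal usage sketch, added for illustration (not part of the original
    # module). RedirectStdout/RedirectStderr are partials over
    # _stdchannel_redirected, so they only need the destination filename.
    # Inside the `with` block the OS-level file descriptor is swapped via
    # os.dup2, so even output from C extensions is captured. The file name
    # "solver_output.log" is just an example value.
    with RedirectStdout("solver_output.log"):
        print("this line is written to solver_output.log")
    print("and this line appears on the real stdout again")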
[ "See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated", "scripted ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # #", "= \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\"", "# ] # } # Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" #", "notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated in scripted ways", "\"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [", "page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in", "# ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ #", "{\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ {", "\"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # } # Testing # ------- #", "app_license = \"MIT\" # Includes in <head> # ------------------ # include js, css", "# role_home_page = { # \"Role\": \"home_page\" # } # Website user home", "# doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures", "# web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js = {\"page\"", "\"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in doctype views # doctype_js", "# page_js = {\"page\" : \"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\",", "] ] } ] # Home Pages # ---------- # application home page", "# } # Website user home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\"", "record of this doctype # website_generators = [\"Web Page\"] # Installation # ------------", "# doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\",", "# website_generators = [\"Web Page\"] # Installation # ------------ # before_install = \"pos_kiosk.install.before_install\"", "\"public/js/includes/number_to_words.js\"] # } # include js in doctype views # doctype_js = {\"doctype\"", ": \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\",", "\"Mode of Payment-logo\" ] ] ] } ] # Home Pages # ----------", "= { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = { #", "\"MIT\" # Includes in <head> # ------------------ # include js, css files in", "\"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # } #", "web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js", "], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\"", "# Document Events # --------------- # Hook on document methods and events #", "# permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission =", "doc_events = { # \"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\", #", "{ \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\", [ 
\"Sales Invoice Item-pos_kiosk\",", "Website user home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators #", "\"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header of web template # web_include_css", "\"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license", "], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\"", "# Generators # ---------- # automatically create page for each record of this", "this doctype # website_generators = [\"Web Page\"] # Installation # ------------ # before_install", "import unicode_literals from . import __version__ as app_version app_name = \"pos_kiosk\" app_title =", "in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" #", "-*- coding: utf-8 -*- from __future__ import unicode_literals from . import __version__ as", "\"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" # } # } # Scheduled", "Permissions evaluated in scripted ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", #", "document methods and events # doc_events = { # \"*\": { # \"on_update\":", "# } # } # Scheduled Tasks # --------------- # scheduler_events = {", "<head> # ------------------ # include js, css files in header of desk.html #", ": \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" :", "js, css files in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js", "# ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ #", "\"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom", "# Includes in <head> # ------------------ # include js, css files in header", "[\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in doctype views # doctype_js =", "# scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\":", "] # Home Pages # ---------- # application home page (will override Website", "\"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ],", "------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\" # noqa # }", "override Website Settings) # home_page = \"login\" # website user home page (by", "= { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in doctype", "# Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" #", "# \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" #", "# website user home page (by Role) # role_home_page = { # \"Role\":", "} # Document Events # --------------- # Hook on document methods and events", "\"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods = { #", "# -*- coding: utf-8 -*- from __future__ import unicode_literals from . 
import __version__", "\"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create page for each record of", "in header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\"", "\"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # --------------- # Hook on document", "Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color", "Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------", "app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header of web template", "Permissions # ----------- # Permissions evaluated in scripted ways # permission_query_conditions = {", "coding: utf-8 -*- from __future__ import unicode_literals from . import __version__ as app_version", "page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically", "= { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # --------------- #", "\"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" # } # } #", "# ------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\" # noqa #", "# \"pos_kiosk.tasks.monthly\" # ] # } # Testing # ------- # before_tests =", "evaluated in scripted ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # }", "permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = {", ": \"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } #", "utf-8 -*- from __future__ import unicode_literals from . 
import __version__ as app_version app_name", "[\"Web Page\"] # Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install =", "css files in header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js", "# doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} #", "\"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", #", "# app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files", "] ] ] } ] # Home Pages # ---------- # application home", "Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions", "# Permissions evaluated in scripted ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\",", "# ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ #", "# } # Document Events # --------------- # Hook on document methods and", "= \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config =", "= \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes in <head> #", "------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- #", "= \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js = {\"page\" : \"public/js/file.js\"}", "---------- # application home page (will override Website Settings) # home_page = \"login\"", "= { # \"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\":", "in doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\"", "} # Scheduled Tasks # --------------- # scheduler_events = { # \"all\": [", ": \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" :", "Events # --------------- # Hook on document methods and events # doc_events =", "Page\"] # Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\"", "page for each record of this doctype # website_generators = [\"Web Page\"] #", "--------------- # Hook on document methods and events # doc_events = { #", "doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\", \"filters\":", "\"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js = {\"page\" : \"public/js/file.js\"} #", "\"on_cancel\": \"method\", # \"on_trash\": \"method\" # } # } # Scheduled Tasks #", "# application home page (will override Website Settings) # home_page = \"login\" #", "page (will override Website Settings) # home_page = \"login\" # website user home", "\"home_page\" # } # Website user home page (by function) # get_website_user_home_page =", "Document Events # --------------- # Hook on document methods and events # doc_events", "# include js, css files in header of web template # web_include_css =", "\"hourly\": 
[ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ]", "(by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create", "# ---------- # automatically create page for each record of this doctype #", "# include js in page # page_js = {\"page\" : \"public/js/file.js\"} # page_js", "web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page #", "-*- from __future__ import unicode_literals from . import __version__ as app_version app_name =", "# } # Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted", "scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [", "\"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode", "= \"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" #", "website user home page (by Role) # role_home_page = { # \"Role\": \"home_page\"", "= { # \"Role\": \"home_page\" # } # Website user home page (by", "js in page # page_js = {\"page\" : \"public/js/file.js\"} # page_js = {", "before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ #", "application home page (will override Website Settings) # home_page = \"login\" # website", "# ------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # -----------", "include js in page # page_js = {\"page\" : \"public/js/file.js\"} # page_js =", "Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] } ] # Home Pages", "------------------ # include js, css files in header of desk.html # app_include_css =", "app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon =", "Tasks # --------------- # scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\" #", "\"login\" # website user home page (by Role) # role_home_page = { #", "# \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # } # Testing # -------", "# get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create page for", "\"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] }", "create page for each record of this doctype # website_generators = [\"Web Page\"]", "# \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" #", "app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk", "of Payment-logo\" ] ] ] } ] # Home Pages # ---------- #", "Field\", \"filters\": [ [ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\"", "\"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\",", "each record of this doctype # website_generators = [\"Web Page\"] # Installation #", "__future__ import unicode_literals from . 
import __version__ as app_version app_name = \"pos_kiosk\" app_title", "= {\"page\" : \"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] #", "ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission", "in <head> # ------------------ # include js, css files in header of desk.html", "has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # ---------------", "# ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk Notifications", "[ # \"pos_kiosk.tasks.monthly\" # ] # } # Testing # ------- # before_tests", "import __version__ as app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher =", "# \"on_trash\": \"method\" # } # } # Scheduled Tasks # --------------- #", "{ # \"Role\": \"home_page\" # } # Website user home page (by function)", "[ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] } ] #", "# home_page = \"login\" # website user home page (by Role) # role_home_page", "\"Custom Field\", \"filters\": [ [ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of", "and events # doc_events = { # \"*\": { # \"on_update\": \"method\", #", "unicode_literals from . import __version__ as app_version app_name = \"pos_kiosk\" app_title = \"Pos", "Whitelisted Methods # ------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\" #", "# web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page", "function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create page", "= \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header of web template #", "# include js in doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} #", "# Home Pages # ---------- # application home page (will override Website Settings)", "\"filters\": [ [ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ]", "octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes in", "{ # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in doctype views", ": \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\":", "Pages # ---------- # application home page (will override Website Settings) # home_page", "header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" #", "# Scheduled Tasks # --------------- # scheduler_events = { # \"all\": [ #", "} ] # Home Pages # ---------- # application home page (will override", "include js in doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js", "Payment-logo\" ] ] ] } ] # Home Pages # ---------- # application", "# before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------", "------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ # #", "of desk.html 
# app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js,", "from . import __version__ as app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\"", "\"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # }", "Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk", "# # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events", "\"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # --------------- # Hook on document methods", "# ----------- # Permissions evaluated in scripted ways # permission_query_conditions = { #", "] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # } # Testing #", "for each record of this doctype # website_generators = [\"Web Page\"] # Installation", "app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description =", "= \"<EMAIL>\" app_license = \"MIT\" # Includes in <head> # ------------------ # include", "# \"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" #", "= {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [", "# \"on_cancel\": \"method\", # \"on_trash\": \"method\" # } # } # Scheduled Tasks", "in page # page_js = {\"page\" : \"public/js/file.js\"} # page_js = { #", "\"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\"", "# ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ #", "[ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], #", "# \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" # } # }", "Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] } ] # Home Pages #", "= \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods = {", "# \"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\":", "Scheduled Tasks # --------------- # scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\"", "[ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] #", "= {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\", \"filters\": [", "\"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js =", "# } # include js in doctype views # doctype_js = {\"doctype\" :", "# ------------------ # include js, css files in header of desk.html # app_include_css", "doctype # website_generators = [\"Web Page\"] # Installation # ------------ # before_install =", "\"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] } ] # Home", "} # Website user home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" #", "= \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header", "in scripted ways # permission_query_conditions = { # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } #", "\"pos_kiosk.install.before_install\" # after_install = 
\"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config", "Home Pages # ---------- # application home page (will override Website Settings) #", "Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions #", "# \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\",", "\"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [", "{\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\"", "# --------------- # Hook on document methods and events # doc_events = {", "{\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\"", "automatically create page for each record of this doctype # website_generators = [\"Web", "Hook on document methods and events # doc_events = { # \"*\": {", "} # include js in doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"}", "as app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description", "# \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" #", "app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email =", "\"pos_kiosk.tasks.monthly\" # ] # } # Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\"", "# \"Role\": \"home_page\" # } # Website user home page (by function) #", "desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css", "{ # \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" # } #", "App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license =", "= \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated in scripted ways #", "# has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events #", "doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures =", "# Website user home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators", "# ---------- # application home page (will override Website Settings) # home_page =", "[ [ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ]", "app_color = \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes in <head>", "fixtures = [ { \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\", [", "page (by Role) # role_home_page = { # \"Role\": \"home_page\" # } #", "---------- # automatically create page for each record of this doctype # website_generators", "\"method\" # } # } # Scheduled Tasks # --------------- # scheduler_events =", "doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js", "------------ # before_install = \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk 
Notifications #", "(by Role) # role_home_page = { # \"Role\": \"home_page\" # } # Website", "\"on_trash\": \"method\" # } # } # Scheduled Tasks # --------------- # scheduler_events", "# See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions", "= \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create page for each record", "\"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"}", "--------------- # scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\" # ], #", "page_js = {\"page\" : \"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"]", "Includes in <head> # ------------------ # include js, css files in header of", "], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\"", "{\"page\" : \"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # }", "# doc_events = { # \"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\",", "\"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header of", "[ \"name\", \"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ]", "# app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in header of web", "= { # \"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [ #", "\"daily\": [ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ],", "{ # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # --------------- # Hook", "# notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated in scripted", "template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in", "# \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # }", "role_home_page = { # \"Role\": \"home_page\" # } # Website user home page", "home page (will override Website Settings) # home_page = \"login\" # website user", "app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js, css files in", "events # doc_events = { # \"*\": { # \"on_update\": \"method\", # \"on_cancel\":", "doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js", "Website Settings) # home_page = \"login\" # website user home page (by Role)", "methods and events # doc_events = { # \"*\": { # \"on_update\": \"method\",", "{ # \"all\": [ # \"pos_kiosk.tasks.all\" # ], # \"daily\": [ # \"pos_kiosk.tasks.daily\"", "# before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods", "user home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ----------", "(will override Website Settings) # home_page = \"login\" # website user home page", "web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js = {\"page\" :", "# \"Event\": 
\"frappe.desk.doctype.event.event.has_permission\", # } # Document Events # --------------- # Hook on", "# include js, css files in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\"", "= \"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon", "= \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\"", "\"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" # }", "\"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon", "# automatically create page for each record of this doctype # website_generators =", "# Permissions # ----------- # Permissions evaluated in scripted ways # permission_query_conditions =", "app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes in <head> # ------------------ #", "css files in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js =", "\"in\", [ \"Sales Invoice Item-pos_kiosk\", \"Mode of Payment-logo\" ] ] ] } ]", "include js, css files in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" #", "from __future__ import unicode_literals from . import __version__ as app_version app_name = \"pos_kiosk\"", "} # } # Scheduled Tasks # --------------- # scheduler_events = { #", "# \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\":", "{ # \"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\"", "] } ] # Home Pages # ---------- # application home page (will", "# } # # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } #", "doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" :", "# page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js", "files in header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\"", "frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated in", "----------- # Permissions evaluated in scripted ways # permission_query_conditions = { # \"Event\":", "= [\"Web Page\"] # Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" # after_install", "home_page = \"login\" # website user home page (by Role) # role_home_page =", "of this doctype # website_generators = [\"Web Page\"] # Installation # ------------ #", "files in header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js =", "= \"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\"", "= \"login\" # website user home page (by Role) # role_home_page = {", "= \"pos_kiosk.install.before_install\" # after_install = \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See", "# } # Scheduled Tasks # --------------- # scheduler_events = { # \"all\":", "\"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ]", "# ] # \"monthly\": [ # \"pos_kiosk.tasks.monthly\" # ] # } # Testing", "get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- # automatically create page for each", 
"\"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\": [", "# \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] # \"monthly\":", "\"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email", "# Hook on document methods and events # doc_events = { # \"*\":", "} # # has_permission = { # \"Event\": \"frappe.desk.doctype.event.event.has_permission\", # } # Document", "= \"MIT\" # Includes in <head> # ------------------ # include js, css files", "views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"}", "user home page (by Role) # role_home_page = { # \"Role\": \"home_page\" #", "on document methods and events # doc_events = { # \"*\": { #", "include js, css files in header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\"", "[ # \"pos_kiosk.tasks.daily\" # ], # \"hourly\": [ # \"pos_kiosk.tasks.hourly\" # ], #", ". import __version__ as app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher", "Role) # role_home_page = { # \"Role\": \"home_page\" # } # Website user", "page # page_js = {\"page\" : \"public/js/file.js\"} # page_js = { # \"kiosk\":", "website_generators = [\"Web Page\"] # Installation # ------------ # before_install = \"pos_kiosk.install.before_install\" #", "js in doctype views # doctype_js = {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js =", "{ # \"Event\": \"frappe.desk.doctype.event.event.get_permission_query_conditions\", # } # # has_permission = { # \"Event\":", "# \"*\": { # \"on_update\": \"method\", # \"on_cancel\": \"method\", # \"on_trash\": \"method\" #", "# Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\":", "\"method\", # \"on_trash\": \"method\" # } # } # Scheduled Tasks # ---------------", "\"Role\": \"home_page\" # } # Website user home page (by function) # get_website_user_home_page", "\"public/js/file.js\"} # page_js = { # \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include", "after_install = \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config", "app_publisher = \"9t9it\" app_description = \"Kiosk App\" app_icon = \"octicon octicon-file-directory\" app_color =", "header of desk.html # app_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # app_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include", "# Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods #", "[ { \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\", [ \"Sales Invoice", "= {\"doctype\" : \"public/js/doctype.js\"} # doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js =", "= \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include js in page # page_js", "\"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\"", "\"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js = {\"doctype\" : \"public/js/doctype_calendar.js\"}", 
"Settings) # home_page = \"login\" # website user home page (by Role) #", "home page (by Role) # role_home_page = { # \"Role\": \"home_page\" # }", "# \"kiosk\": [\"public/js/pos_page_js.js\", \"public/js/includes/number_to_words.js\"] # } # include js in doctype views #", "= [ { \"doctype\": \"Custom Field\", \"filters\": [ [ \"name\", \"in\", [ \"Sales", "Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\"", "Generators # ---------- # automatically create page for each record of this doctype", "= {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} # doctype_calendar_js =", "{\"doctype\" : \"public/js/doctype_calendar.js\"} fixtures = [ { \"doctype\": \"Custom Field\", \"filters\": [ [", "__version__ as app_version app_name = \"pos_kiosk\" app_title = \"Pos Kiosk\" app_publisher = \"9t9it\"", "# Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config # notification_config = \"pos_kiosk.notifications.get_notification_config\" #", "of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" # web_include_js = \"/assets/pos_kiosk/js/pos_kiosk.js\" # include", "\"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes in <head> # ------------------", "] # } # Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding", "[ # \"pos_kiosk.tasks.hourly\" # ], # \"weekly\": [ # \"pos_kiosk.tasks.weekly\" # ] #", "# --------------- # scheduler_events = { # \"all\": [ # \"pos_kiosk.tasks.all\" # ],", "# doctype_list_js = {\"doctype\" : \"public/js/doctype_list.js\"} # doctype_tree_js = {\"doctype\" : \"public/js/doctype_tree.js\"} #", "app_icon = \"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\"", "\"octicon octicon-file-directory\" app_color = \"grey\" app_email = \"<EMAIL>\" app_license = \"MIT\" # Includes", "\"<EMAIL>\" app_license = \"MIT\" # Includes in <head> # ------------------ # include js,", "js, css files in header of web template # web_include_css = \"/assets/pos_kiosk/css/pos_kiosk.css\" #", "} # Testing # ------- # before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods", "Methods # ------------------------------ # # override_whitelisted_methods = { # \"pos_bahrain.api.get_item_details.get_item_details\": \"pos_kiosk.api.item.get_item_details\" # noqa", "# after_install = \"pos_kiosk.install.after_install\" # Desk Notifications # ------------------ # See frappe.core.notifications.get_notification_config #", "home page (by function) # get_website_user_home_page = \"pos_kiosk.utils.get_home_page\" # Generators # ---------- #", "\"pos_kiosk.notifications.get_notification_config\" # Permissions # ----------- # Permissions evaluated in scripted ways # permission_query_conditions", "before_tests = \"pos_kiosk.install.before_tests\" # Overriding Whitelisted Methods # ------------------------------ # # override_whitelisted_methods =" ]
[ "neural network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return", "SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def default_config(): config", "config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg", "activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use", "self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a", "64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story =", "default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg)", "KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg", "), name='story') question = Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc =", "import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm", "32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden", "= model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question", "Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\"", "self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\"", "Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\"", "eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc)", "pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\"", "story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size,", "eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3,", "class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def default_config():", "conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response)", "Flatten from keras.optimizers import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use", "= Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response", "axis=1) response = LSTM(hidden, 
dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story,", "= Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question", "1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story,", "eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc)", "__init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story =", "= LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response)", "eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response", "= Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story)", "a simple lstm neural network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden']", "question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural", "conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size,", "return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden =", "config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden']", "super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ),", "Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc", "<gh_stars>1-10 from keras import Model, Input from keras.layers import Dense, concatenate, LSTM, Reshape,", "eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3,", "eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story =", "eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question)", "Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question", "concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden,", "= Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model", "padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden,", "= 
Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response", "eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16,", "hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question')", "eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16,", "Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\"", "padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question", "3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question =", "concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model", "EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def default_config(): config", "), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story =", "Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response", "config['hidden'] = 32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def", "Use a simple lstm neural network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config()", "name='story') question = Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc = Reshape((1,", "Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response =", "metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def", "Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question =", "3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc =", "3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) #", "model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen,", "= Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story)", "import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import", "ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def default_config(): config", "self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), 
loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a", "Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story", "Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base import KerasModel class", "= KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ =", "= self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story", "metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def", "dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy',", "def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story", "= Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel):", "from keras import Model, Input from keras.layers import Dense, concatenate, LSTM, Reshape, Permute,", "Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm", "_create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ),", "= Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story)", "padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3,", "default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, model_cfg): super().__init__(model_cfg)", "response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question],", "recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy'])", "self._cfg = model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story')", "padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size,", "def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, model_cfg):", "eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question =", "keras.optimizers import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple", 
"name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32,", "loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod", "= 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self):", "def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen,", "= Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel):", "simple lstm neural network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] =", "= Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3,", "Input from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten", "Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story) eb_story", "= Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1)", "), name='question') conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2,", "64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question =", "self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story')", "network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config", "= concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response =", "conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc)", "3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response =", "config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_", "= cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question", "\"\"\" Use a simple lstm neural network \"\"\" @staticmethod def default_config(): config =", "from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from", "class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def default_config():", "conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response", "= Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question],", "= KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg =", "Reshape((1, 
int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response =", "KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg", "Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc", "Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response =", "# eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2,", "Model, Input from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D,", "from keras.optimizers import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a", "super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ),", "# eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question)", "\"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def", "Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story)", "Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size,", "= Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response =", "config['hidden'] = 32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def", "config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden']", "question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural", "3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64,", "= Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple", "question = Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc)", "64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2,", "eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story, eb_question],", "= Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc", "name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64,", "= Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = 
Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question)", "= Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question)", "int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size,", "self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story =", "), name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story =", "eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model =", "Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32,", "Convolution1D(64, 3, padding='same')(eb_question) eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question)", "__init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story =", "self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') conc =", "Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base import", "Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story", "question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3,", "def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self, cfg):", "Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story = Flatten()(eb_story) eb_question", "32 return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden", "question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question", "= Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question =", "= Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question =", "name='question') conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc)", "story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') conc = concatenate([story,", "= Flatten()(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response", "Dropout, Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel):", "name='story') question = Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story)", 
"activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use", "question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc = Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2,", "LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base", "@staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32 return config def __init__(self,", "= Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question)", "padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question = Flatten()(eb_question) conc = concatenate([eb_story,", "concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam from", "model_cfg def _create_network_(self): hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question =", "response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class", "class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def default_config():", "cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen,", "_create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ),", "= self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') conc", "Permute((2, 1))(conc) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model =", "eb_question = Convolution1D(32, 3, padding='same')(eb_question) eb_question = Convolution1D(16, 3, padding='same')(eb_question) # eb_question =", "hidden = self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question')", "KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod def", "self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story =", "= concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response)", "Adam from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural", "Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm", "padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) # eb_story", "= self._cfg['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') eb_story", 
"Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1) response = LSTM(hidden,", "keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers", "import Model, Input from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout,", "= Convolution1D(64, 3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3,", "= Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple", "import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod", "lstm neural network \"\"\" @staticmethod def default_config(): config = KerasModel.default_config() config['hidden'] = 32", "name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question", "self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\"", "), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question)", "keras import Model, Input from keras.layers import Dense, concatenate, LSTM, Reshape, Permute, Embedding,", "outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network", "= 32 return config def __init__(self, model_cfg): super().__init__(model_cfg) self._cfg = model_cfg def _create_network_(self):", "Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc = Reshape((1, int(conc.shape[1])))(conc) conc =", "return config def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden =", "Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story, eb_question], axis=1)", "cfg def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question =", "def _create_network_(self): hidden = self._cfg_['hidden'] story = Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen,", "= Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],)", "LSTM(hidden, dropout=0.2, recurrent_dropout=0.2)(conc) response = Dense(self._vocab_size, activation='softmax')(response) self._model = Model(inputs=[story, question], outputs=response) self._model.compile(optimizer=Adam(lr=2e-4),", "eb_story = Flatten()(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Convolution1D(64, 3, padding='same')(eb_question) eb_question", "3, padding='same')(eb_story) eb_story = Convolution1D(32, 3, padding='same')(eb_story) eb_story = Convolution1D(16, 3, padding='same')(eb_story) #", "outputs=response) self._model.compile(optimizer=Adam(lr=2e-4), loss='sparse_categorical_crossentropy', metrics=['accuracy']) class EmbedLSTM(KerasModel): \"\"\" Use a simple lstm neural 
network", "= Input((self._query_maxlen, ), name='question') eb_story = Embedding(self._vocab_size, 64)(story) eb_story = Convolution1D(64, 3, padding='same')(eb_story)", "Input((self._story_maxlen, ), name='story') question = Input((self._query_maxlen, ), name='question') conc = concatenate([story, question],) conc", "Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam from pypagai.models.base import KerasModel", "Dense, concatenate, LSTM, Reshape, Permute, Embedding, Dropout, Convolution1D, Flatten from keras.optimizers import Adam", "def __init__(self, cfg): super().__init__(cfg) self._cfg_ = cfg def _create_network_(self): hidden = self._cfg_['hidden'] story", "loss='sparse_categorical_crossentropy', metrics=['accuracy']) class ConvLSTM(KerasModel): \"\"\" Use a simple lstm neural network \"\"\" @staticmethod", "from pypagai.models.base import KerasModel class SimpleLSTM(KerasModel): \"\"\" Use a simple lstm neural network", "eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc = concatenate([eb_story,", "64)(story) eb_story = Dropout(0.3)(eb_story) eb_question = Embedding(self._vocab_size, 64)(question) eb_question = Dropout(0.3)(eb_question) conc =" ]
[ "import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone):", "for _ in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block,", "from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self,", "activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self, x): output_list = []", "= 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(),", "= activationName self.bn_name = bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def", "bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output *", "planes) self.in_planes = planes def forward(self, x): output_list = [] for block in", "self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self, planes, num_cells): for _ in range(num_cells): temp_block", "def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel", "easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__", "layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output *", "# Author: ''' PNASNet in PyTorch. 
Paper: Progressive Neural Architecture Search ''' from", "num_cells): for _ in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(),", "Progressive Neural Architecture Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import", "self.num_cells) def make_layer(self, planes, num_cells): for _ in range(num_cells): temp_block = self.block(self.in_planes, planes,", "down_block, planes) self.in_planes = planes def forward(self, x): output_list = [] for block", "planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes =", "from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from", "self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output,", "Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone", "* 4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self, planes, num_cells): for _ in", "= num_cells self.block = block self.activation_name = activationName self.bn_name = bnName self.first_output =", "stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2)", "bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2,", "stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self, x): output_list", "num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32, block=CellB)", "self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output", "out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output", "bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes def downsample(self, planes): down_block =", "output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def", "from easyai.base_name.block_name import NormalizationType, ActivationType from 
easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import *", "NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import", "block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block =", "#!/usr/bin/env python # -*- coding:utf-8 -*- # Author: ''' PNASNet in PyTorch. Paper:", "= bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels =", "= self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def", "def forward(self, x): output_list = [] for block in self._modules.values(): x = block(x)", "pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model", "planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self, x):", "self.in_planes = planes def downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name)", "= [] for block in self._modules.values(): x = block(x) output_list.append(x) return output_list def", "return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model", "activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells)", "= data_channel self.num_cells = num_cells self.block = block self.activation_name = activationName self.bn_name =", "-*- coding:utf-8 -*- # Author: ''' PNASNet in PyTorch. Paper: Progressive Neural Architecture", "self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4)", "output_list = [] for block in self._modules.values(): x = block(x) output_list.append(x) return output_list", "BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA,", "model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32, block=CellB) model.set_name(BackboneName.PNASNetB) return", "self._modules.values(): x = block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6,", "python # -*- coding:utf-8 -*- # Author: ''' PNASNet in PyTorch. 
Paper: Progressive", "* 4, self.num_cells) def make_layer(self, planes, num_cells): for _ in range(num_cells): temp_block =", "= PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel,", "in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes", "self.downsample(self.first_output * 2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4,", "self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self, x): output_list = [] for", "self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name)", "ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def", "= [] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False,", "downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes", "block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA)", "* 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self, planes,", "kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output, self.num_cells) self.downsample(self.first_output *", "def downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes)", "self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self, planes, num_cells): for", "Architecture Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from", "x = block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44,", "* from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ = ['pnasnet_A',", "''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import", "def create_block_list(self): self.block_out_channels = [] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3,", "temp_block, planes) self.in_planes = planes def downsample(self, 
planes): down_block = self.block(self.in_planes, planes, stride=2,", "import CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6,", "= planes def forward(self, x): output_list = [] for block in self._modules.values(): x", "num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block", "= num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index =", "self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block = block self.activation_name = activationName", "layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) self.add_block_list(layer1.get_name(), layer1, self.first_output)", "self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells) def make_layer(self,", "self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index", "def make_layer(self, planes, num_cells): for _ in range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1,", "[] for block in self._modules.values(): x = block(x) output_list.append(x) return output_list def pnasnet_A(data_channel):", "[] self.index = 0 layer1 = ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name,", "block in self._modules.values(): x = block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model =", "def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel):", "num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells", "= ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU):", "bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes def forward(self, x): output_list =", "import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB __all__ =", "model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model =", "self.data_channel = data_channel self.num_cells = num_cells self.block = block self.activation_name = activationName self.bn_name", "= ConvBNActivationBlock(in_channels=self.data_channel, out_channels=self.first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=self.bn_name, activationName=self.activation_name) 
self.add_block_list(layer1.get_name(), layer1, self.first_output) self.make_layer(self.first_output,", "output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return", "Author: ''' PNASNet in PyTorch. Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name", "import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block", "__init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells", "activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block = block self.activation_name", "num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = [] self.index = 0", "PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6,", "__all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d,", "class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel", "bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block = block", "bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels = []", "down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(), down_block, planes) self.in_planes = planes", "planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes def downsample(self, planes):", "easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block", "# -*- coding:utf-8 -*- # Author: ''' PNASNet in PyTorch. 
Paper: Progressive Neural", "x): output_list = [] for block in self._modules.values(): x = block(x) output_list.append(x) return", "range(num_cells): temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes =", "CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA,", "num_cells=6, num_planes=44, block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32,", "Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name", "data_channel self.num_cells = num_cells self.block = block self.activation_name = activationName self.bn_name = bnName", "easyai.base_name.block_name import NormalizationType, ActivationType from easyai.base_name.backbone_name import BackboneName from easyai.model.backbone.utility.base_backbone import * from", "self.activation_name = activationName self.bn_name = bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list()", "from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import CellA, CellB", "'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA)", "self.bn_name = bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self): self.block_out_channels", "num_cells self.block = block self.activation_name = activationName self.bn_name = bnName self.first_output = num_planes", "CellA, CellB __all__ = ['pnasnet_A', 'pnasnet_B'] class PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44,", "PNASNet(BaseBackbone): def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel =", "coding:utf-8 -*- # Author: ''' PNASNet in PyTorch. 
Paper: Progressive Neural Architecture Search", "import BackboneName from easyai.model.backbone.utility.base_backbone import * from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock from easyai.model.base_block.cls.pnasnet_block import", "block self.activation_name = activationName self.bn_name = bnName self.first_output = num_planes self.in_planes = self.first_output", "block=CellA) model.set_name(BackboneName.PNASNetA) return model def pnasnet_B(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=32, block=CellB) model.set_name(BackboneName.PNASNetB)", "= planes def downsample(self, planes): down_block = self.block(self.in_planes, planes, stride=2, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(down_block.get_name(),", "activationName self.bn_name = bnName self.first_output = num_planes self.in_planes = self.first_output self.create_block_list() def create_block_list(self):", "data_channel=3, num_cells=6, num_planes=44, block=CellA, bnName=NormalizationType.BatchNormalize2d, activationName=ActivationType.ReLU): super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells =", "temp_block = self.block(self.in_planes, planes, stride=1, bn_name=self.bn_name, activation_name=self.activation_name) self.add_block_list(temp_block.get_name(), temp_block, planes) self.in_planes = planes", "super().__init__() self.set_name(BackboneName.PNASNetA) self.data_channel = data_channel self.num_cells = num_cells self.block = block self.activation_name =", "self.in_planes = planes def forward(self, x): output_list = [] for block in self._modules.values():", "self.num_cells = num_cells self.block = block self.activation_name = activationName self.bn_name = bnName self.first_output", "PNASNet in PyTorch. Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name import NormalizationType,", "in PyTorch. Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name import NormalizationType, ActivationType", "= block(x) output_list.append(x) return output_list def pnasnet_A(data_channel): model = PNASNet(data_channel=data_channel, num_cells=6, num_planes=44, block=CellA)", "2) self.make_layer(self.first_output * 2, self.num_cells) self.downsample(self.first_output * 4) self.make_layer(self.first_output * 4, self.num_cells) def", "''' PNASNet in PyTorch. Paper: Progressive Neural Architecture Search ''' from easyai.base_name.block_name import", "PyTorch. 
# -*- coding: utf-8 -*-
# Author:
'''
PNASNet in PyTorch.
Paper: Progressive Neural Architecture Search
'''

from easyai.base_name.block_name import NormalizationType, ActivationType
from easyai.base_name.backbone_name import BackboneName
from easyai.model.backbone.utility.base_backbone import *
from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock
from easyai.model.base_block.cls.pnasnet_block import CellA, CellB

__all__ = ['pnasnet_A', 'pnasnet_B']


class PNASNet(BaseBackbone):

    def __init__(self, data_channel=3, num_cells=6, num_planes=44, block=CellA,
                 bnName=NormalizationType.BatchNormalize2d,
                 activationName=ActivationType.ReLU):
        super().__init__()
        # data_channel and num_cells are stored so create_block_list() can use them
        self.data_channel = data_channel
        self.num_cells = num_cells
        self.block = block
        self.activation_name = activationName
        self.bn_name = bnName
        self.first_output = num_planes
        self.in_planes = self.first_output
        self.create_block_list()

    def create_block_list(self):
        self.block_out_channels = []
        self.index = 0
        # stem: 3x3 conv + BN + activation, keeping the input resolution
        layer1 = ConvBNActivationBlock(in_channels=self.data_channel,
                                       out_channels=self.first_output,
                                       kernel_size=3,
                                       stride=1,
                                       padding=1,
                                       bias=False,
                                       bnName=self.bn_name,
                                       activationName=self.activation_name)
        self.add_block_list(layer1.get_name(), layer1, self.first_output)
        # three stages of cells, doubling the channel width at each downsample
        self.make_layer(self.first_output, self.num_cells)
        self.downsample(self.first_output * 2)
        self.make_layer(self.first_output * 2, self.num_cells)
        self.downsample(self.first_output * 4)
        self.make_layer(self.first_output * 4, self.num_cells)

    def make_layer(self, planes, num_cells):
        for _ in range(num_cells):
            temp_block = self.block(self.in_planes, planes, stride=1,
                                    bn_name=self.bn_name,
                                    activation_name=self.activation_name)
            self.add_block_list(temp_block.get_name(), temp_block, planes)
            self.in_planes = planes

    def downsample(self, planes):
        down_block = self.block(self.in_planes, planes, stride=2,
                                bn_name=self.bn_name,
                                activation_name=self.activation_name)
        self.add_block_list(down_block.get_name(), down_block, planes)
        self.in_planes = planes

    def forward(self, x):
        # return the output of every registered block, not just the final one
        output_list = []
        for block in self._modules.values():
            x = block(x)
            output_list.append(x)
        return output_list


def pnasnet_A(data_channel):
    # mirrors pnasnet_B below, using the class defaults (CellA cells, 44 planes)
    model = PNASNet(data_channel=data_channel, num_cells=6,
                    num_planes=44, block=CellA)
    model.set_name(BackboneName.PNASNetA)
    return model


def pnasnet_B(data_channel):
    model = PNASNet(data_channel=data_channel, num_cells=6,
                    num_planes=32, block=CellB)
    model.set_name(BackboneName.PNASNetB)
    return model
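# Usage sketch (not part of the original file): a minimal smoke test, assuming torch
# and the easyai blocks imported above are available. Shapes follow from the code above.
if __name__ == '__main__':
    import torch

    backbone = pnasnet_A(data_channel=3)
    features = backbone(torch.randn(1, 3, 32, 32))
    # forward() returns one tensor per registered block; the deepest feature has
    # first_output * 4 = 176 channels for pnasnet_A (two stride-2 downsamples: 32 -> 8).
    print(len(features), features[-1].shape)
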
[ "self.download_done_signal.emit() except Exception as e: if self.logger is not None: self.logger.error(e) if __name__", "not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0 bboxs = self.cut_bbox()", "def generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI,", "math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical Graphics", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 63, \"endY\": 31, \"startX\":", "math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas =", "generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced", "y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir,", "except Exception as e: resp = None time.sleep(3) if resp is None: return", "= { \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data", "\"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token = token def", "data and \" \"information funded by the European Union - EU-DEM layers\", \"available\":", "import logging import requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox", "def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q,", "as f: json.dump(metadatas, f) except Exception as e: if self.logger is not None:", "chunk: f.write(chunk) except Exception as e: return -1 self._data2DB(x, y, z, file_path) return", "Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and \" \"information funded", "not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads = [] for i", "self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z):", "= '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y,", "try: with open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk)", "_url = self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True, timeout=2) break except", "0 } ], [ { \"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0", "y=y, z=z) def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain',", "continue break except Exception as e: resp = None time.sleep(3) if resp is", "], [ { \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0 } ],", "logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng,", "{ \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 } ], [ {", "param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if requre_count >", "resp.status_code != 200: return -1 try: with open(file_path, 'wb') as f: for chunk", "access_token} while True: if requre_count > 4: break try: _url = self.get_url(x, y,", "{ \"endX\": 127, 
\"endY\": 63, \"startX\": 0, \"startY\": 0 } ], [ {", "\" \"SRTM between 60N and 60S, 30m Europe. Minimum global coverage of 1000m.\",", "], \"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK World Terrain Premium Tileset,", "url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break try: request_count +=", "# coding=utf-8 import json import os import math import logging import requests import", "1023, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 4095, \"endY\": 2047,", "\"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f)", "{ \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0 } ], [ {", "1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\":", "max_lat = 90.0 start_zoom = 0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng,", "], [ { \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0 } ],", "logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng", "timeout=2) if resp.status_code != 200: continue break except Exception as e: resp =", "- 30m resolution CONUS, 30m resolution \" \"SRTM between 60N and 60S, 30m", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 1023, \"endY\": 511, \"startX\":", "= \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir,", "self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token = token def bbox2xyz(self, bbox,", "y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0", "0 } ], [ { \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0", "[ { \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0 } ], [", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 511, \"endY\": 255, \"startX\":", "None: self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0 bboxs = self.cut_bbox() for", "= os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0", "], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\",", "self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0 bboxs = self.cut_bbox() for bbox", "in self.threads: thread.start() for thread in self.threads: thread.wait() for t in self.threads: t.stop()", "0 } ], [ { \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0", "} ], [ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 }", "except Exception as e: if self.logger is not None: self.logger.exception(e) def run(self): try:", "resolution CONUS, 30m resolution \" \"SRTM between 60N and 60S, 30m Europe. 
Minimum", "count += _count self.division_done_signal.emit(count) for bbox in bboxs: while True: if not self.running:", "while True: if request_count > 4: break try: request_count += 1 param =", "2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 4095,", "Inc., © CGIAR-CSI, Produced using Copernicus data and \" \"information funded by the", "'__main__': if 1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s')", "\"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\"", "[ { \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0 } ], [", "] ], \"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK World Terrain Premium", "{ \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0 } ], [ {", "if chunk: f.write(chunk) except Exception as e: return -1 self._data2DB(x, y, z, file_path)", "} ], [ { \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0 }", "metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception", "{ \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 } ], [ {", "\"description\": \"STK World Terrain Premium Tileset, v1.3. 10m - 30m resolution CONUS, 30m", "and 60S, 30m Europe. Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\",", "Exception as e: if self.logger is not None: self.logger.exception(e) def run(self): try: self.generate_metadata()", "[ { \"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\": 0 } ], [", "1 def generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical Graphics Inc., ©", "is None: return None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL =", "self.division_done_signal.emit(count) for bbox in bboxs: while True: if not self.running: time.sleep(0.01) else: break", "self.threads.append(thread) for thread in self.threads: thread.start() for thread in self.threads: thread.wait() for t", "request_count += 1 param = {'access_token': token} resp = requests.get(url, params=param, timeout=2) if", "None: self.logger.error(e) if __name__ == '__main__': if 1: logger = logging.getLogger('down') try: root", "def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x,", "format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return", "for bbox in bboxs: while True: if not self.running: time.sleep(0.01) else: break task_q", "logger, write_db=write_db) self.root_dir = root_dir self.token = token def bbox2xyz(self, bbox, z): min_x,", "return -1 self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = ''", "resp.status_code != 200: continue break except Exception as e: resp = None time.sleep(3)", "start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out')", "stream=True, timeout=2) break except Exception as e: resp = None time.sleep(3) requre_count +=", "os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f:", "\"startY\": 0 } ], [ { \"endX\": 8191, \"endY\": 4095, 
\"startX\": 0, \"startY\":", "root_dir = '' def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox,", "if not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads = [] for", "time.sleep(3) requre_count += 1 if resp is None: return -1 if resp.status_code !=", "= token def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x,", "% (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x, y=y,", "{ \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and", "def get_access_token(token): resp = None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True:", "\"startY\": 0 } ] ], \"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK", "], [ { \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0 } ],", "Union - EU-DEM layers\", \"available\": [ [ { \"endX\": 1, \"endY\": 0, \"startX\":", "token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token", "= TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception as", "if self.logger is not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0", "\"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 } ] ], \"bounds\": [-180,", "root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir =", "= '' access_token = get_access_token(self.token) if access_token is None: return -1 param =", "0, \"startY\": 0 } ], [ { \"endX\": 15, \"endY\": 7, \"startX\": 0,", "as e: resp = None time.sleep(3) requre_count += 1 if resp is None:", "_download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y,", "hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr)", "bboxs = self.cut_bbox() for bbox in bboxs: _count = self.get_task_count(bbox) count += _count", "y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp", "self.logger is not None: self.logger.error(e) if __name__ == '__main__': if 1: logger =", "log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng =", "in bboxs: while True: if not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox)", "\"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "0 } ], [ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0", "\"STK World Terrain Premium Tileset, v1.3. 
10m - 30m resolution CONUS, 30m resolution", "], [ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 } ]", "\"startY\": 0 } ], [ { \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\":", "+= 1 param = {'access_token': token} resp = requests.get(url, params=param, timeout=2) if resp.status_code", "\"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK World Terrain Premium Tileset, v1.3.", "e: return -1 self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir =", "logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat = -90.0 max_lat", "with open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except", "if request_count > 4: break try: request_count += 1 param = {'access_token': token}", "import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count = 0", "return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if", "\"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir", "\"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\":", "resp is None: return None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL", "\"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "30m resolution CONUS, 30m resolution \" \"SRTM between 60N and 60S, 30m Europe.", "60N and 60S, 30m Europe. 
Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\",", "self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in bboxs: while True: if not", "self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception as e: if self.logger", "= requests.get(_url, params=param, stream=True, timeout=2) break except Exception as e: resp = None", "bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x,", "break except Exception as e: resp = None time.sleep(3) if resp is None:", "= self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in bboxs: while True: if", "\"endY\": 31, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 127, \"endY\":", "[ { \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0 } ], [", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 15, \"endY\": 7, \"startX\":", "self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self,", "write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat))", "[ { \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0 } ], [", "4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 8191,", "not None: self.logger.error(e) if __name__ == '__main__': if 1: logger = logging.getLogger('down') try:", "{ \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0 } ], [ {", "z=z) def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z,", "logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat = -90.0 max_lat = 90.0", "params=param, timeout=2) if resp.status_code != 200: continue break except Exception as e: resp", "resp = None time.sleep(3) requre_count += 1 if resp is None: return -1", "_count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in bboxs: while True:", "break except Exception as e: resp = None time.sleep(3) requre_count += 1 if", "\"startY\": 0 } ], [ { \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\":", "f.write(chunk) except Exception as e: return -1 self._data2DB(x, y, z, file_path) return 1", "Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\":", "} ], [ { \"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 }", "with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception as e: if self.logger", "63, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 255, \"endY\": 127,", "DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count = 0 url", "y, z): return self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z): file_path =", "task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for thread in", "range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread", "f) except 
Exception as e: if self.logger is not None: self.logger.exception(e) def run(self):", "\"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "= None requre_count = 0 _url = '' access_token = get_access_token(self.token) if access_token", "\"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir,", "= requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue break except Exception as", "logger.error('main thread out') except Exception as e: logger.error(e) if 0: accessToken = get_access_token()", "'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception as e: if", "90, ], \"description\": \"STK World Terrain Premium Tileset, v1.3. 10m - 30m resolution", "using Copernicus data and \" \"information funded by the European Union - EU-DEM", "json.dump(metadatas, f) except Exception as e: if self.logger is not None: self.logger.exception(e) def", "= '' def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num,", "in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception as e: if", "} ], [ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 }", "def run(self): try: self.generate_metadata() count = 0 bboxs = self.cut_bbox() for bbox in", "31, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 127, \"endY\": 63,", "return -1 try: with open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if", "access_token = get_access_token(self.token) if access_token is None: return -1 param = {'extensions': 'octvertexnormals-watermask',", "4: break try: _url = self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True,", "{ \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 } ] ], \"bounds\":", "None time.sleep(3) if resp is None: return None resp_json = resp.json() return resp_json.get('accessToken')", "as e: if self.logger is not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count", "\"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "t in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception as e:", "count = 0 bboxs = self.cut_bbox() for bbox in bboxs: _count = self.get_task_count(bbox)", "bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in bboxs: while", "31, \"endY\": 15, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 63,", "'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng =", "1 if resp is None: return -1 if resp.status_code != 200: return -1", "[ { \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0 } ], [", "[ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 } ] ],", "resp = requests.get(_url, params=param, stream=True, timeout=2) break except Exception as e: resp =", "for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e: return -1", "-90.0 max_lat = 90.0 start_zoom = 0 end_zoom = 5 bbox = BoundBox(max_lat,", "= -90.0 max_lat = 90.0 start_zoom = 0 end_zoom = 5 bbox =", "\"endY\": 4095, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 16383, \"endY\":", "200: 
continue break except Exception as e: resp = None time.sleep(3) if resp", "= logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file", "of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0,", "180, 90, ], \"description\": \"STK World Terrain Premium Tileset, v1.3. 10m - 30m", "if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None", "Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and \" \"information funded by", "v1.3. 10m - 30m resolution CONUS, 30m resolution \" \"SRTM between 60N and", "time.sleep(10000) logger.error('main thread out') except Exception as e: logger.error(e) if 0: accessToken =", "0, \"startY\": 0 } ], [ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0,", "0, \"startY\": 0 } ], [ { \"endX\": 511, \"endY\": 255, \"startX\": 0,", "\"endY\": 16383, \"startX\": 0, \"startY\": 0 } ] ], \"bounds\": [-180, -90, 180,", "Exception as e: resp = None time.sleep(3) requre_count += 1 if resp is", "in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for", "def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y =", "\"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with", "import os import math import logging import requests import time from map_download.cmd.BaseDownloader import", "None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True:", "\"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True)", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 31, \"endY\": 15, \"startX\":", "self.threads = [] for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q,", "for thread in self.threads: thread.start() for thread in self.threads: thread.wait() for t in", "requre_count = 0 _url = '' access_token = get_access_token(self.token) if access_token is None:", "token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y,", "-1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if requre_count", "and \" \"information funded by the European Union - EU-DEM layers\", \"available\": [", "\"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "\"startY\": 0 } ], [ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\":", "[ { \"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 } ], [", "file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat = -90.0", "layers\", \"available\": [ [ { \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0", "\" \"information funded by the European Union - EU-DEM layers\", 
\"available\": [ [", "= logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat", "for bbox in bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox", "self.get_task_queue(bbox) self.threads = [] for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token,", "as e: if self.logger is not None: self.logger.error(e) if __name__ == '__main__': if", "logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr)", "0 } ], [ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0", "= resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox,", "], [ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 } ],", "} ], [ { \"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\": 0 }", "0, \"startY\": 0 } ], [ { \"endX\": 3, \"endY\": 1, \"startX\": 0,", "= None time.sleep(3) requre_count += 1 if resp is None: return -1 if", "if 1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr", "z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0 _url", "32767, \"endY\": 16383, \"startX\": 0, \"startY\": 0 } ] ], \"bounds\": [-180, -90,", "'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if requre_count > 4: break try:", "{ \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0 } ], [ {", "} ], [ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 }", "import json import os import math import logging import requests import time from", "exist_ok=True) resp = None requre_count = 0 _url = '' access_token = get_access_token(self.token)", "\"startY\": 0 } ], [ { \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\":", "return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0 _url = ''", "\"endY\": 127, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 511, \"endY\":", "\"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "1 param = {'access_token': token} resp = requests.get(url, params=param, timeout=2) if resp.status_code !=", "= 180.0 min_lat = -90.0 max_lat = 90.0 start_zoom = 0 end_zoom =", "resp = None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count", "is not None: self.logger.error(e) if __name__ == '__main__': if 1: logger = logging.getLogger('down')", "\"startY\": 0 } ], [ { \"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\":", "is not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0 bboxs =", "= None time.sleep(3) if resp is None: return None resp_json = resp.json() return", "0 } ], [ { \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0", "requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp", "task_q = self.get_task_queue(bbox) self.threads = [] for i in 
range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir,", "if resp.status_code != 200: continue break except Exception as e: resp = None", "[ { \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0 } ], [", "} ], [ { \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0 }", "coding: utf-8 -*- # coding=utf-8 import json import os import math import logging", "thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for thread in self.threads: thread.wait() for", "requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue break except Exception as e:", "in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e: return -1 self._data2DB(x, y,", "{ \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0 } ], [ {", "break task_q = self.get_task_queue(bbox) self.threads = [] for i in range(self.thread_num): thread =", "r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr =", "__init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger,", "bbox in bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in", "os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0 _url = '' access_token =", "511, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 2047, \"endY\": 1023,", "open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception as e: if self.logger is", "{ \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0 } ], [ {", "127, \"endY\": 63, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 255,", "(self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z)", "'' access_token = get_access_token(self.token) if access_token is None: return -1 param = {'extensions':", "0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break try: request_count", "European Union - EU-DEM layers\", \"available\": [ [ { \"endX\": 1, \"endY\": 0,", "self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count =", "\"endY\": 1023, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 4095, \"endY\":", "y, z) resp = requests.get(_url, params=param, stream=True, timeout=2) break except Exception as e:", "511, \"endY\": 255, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 1023,", "global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13,", "], [ { \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0 } ],", "= r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr", "if resp is None: return None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread):", "def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db)", "coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", 
\"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\":", "root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db,", "token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token", "0, \"startY\": 0 } ], [ { \"endX\": 7, \"endY\": 3, \"startX\": 0,", "[-180, -90, 180, 90, ], \"description\": \"STK World Terrain Premium Tileset, v1.3. 10m", "Exception as e: return -1 self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine):", "[] self.download_done_signal.emit() except Exception as e: if self.logger is not None: self.logger.error(e) if", "3, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 15, \"endY\": 7,", "is None: return -1 if resp.status_code != 200: return -1 try: with open(file_path,", "BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count = 0 url =", "bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) +", "max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y)", "return self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' %", "self.threads = [] self.download_done_signal.emit() except Exception as e: if self.logger is not None:", "x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain')", "min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x),", "== '__main__': if 1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter =", "token} resp = requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue break except", "], [ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 } ],", "exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except", "EU-DEM layers\", \"available\": [ [ { \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\":", "None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4:", "while True: if requre_count > 4: break try: _url = self.get_url(x, y, z)", "resolution \" \"SRTM between 60N and 60S, 30m Europe. 
Minimum global coverage of", "\"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain')", "e: resp = None time.sleep(3) if resp is None: return None resp_json =", "\"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ],", "10m - 30m resolution CONUS, 30m resolution \" \"SRTM between 60N and 60S,", "0, \"startY\": 0 } ], [ { \"endX\": 127, \"endY\": 63, \"startX\": 0,", "super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token = token def bbox2xyz(self,", "latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\"", "], [ { \"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 } ],", "} ], [ { \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 }", "1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr =", "5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox,", "the European Union - EU-DEM layers\", \"available\": [ [ { \"endX\": 1, \"endY\":", "t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception as e: if self.logger is", "'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp =", "self.cut_bbox() for bbox in bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for", "= 90.0 start_zoom = 0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat,", "], [ { \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 } ],", "self.generate_metadata() count = 0 bboxs = self.cut_bbox() for bbox in bboxs: _count =", "class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False):", "e: if self.logger is not None: self.logger.error(e) if __name__ == '__main__': if 1:", "0 } ], [ { \"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\": 0", "= \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break try: request_count += 1", "= [] self.download_done_signal.emit() except Exception as e: if self.logger is not None: self.logger.error(e)", "thread.wait() for t in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception", "thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token = token def bbox2xyz(self, bbox, z):", "\"startY\": 0 } ], [ { \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\":", "min_lat = -90.0 max_lat = 90.0 start_zoom = 0 end_zoom = 5 bbox", "import requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token):", "self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z) def _download(self, x,", "self.logger.error(e) if __name__ == '__main__': if 1: logger = logging.getLogger('down') try: root =", "0 } ] ], \"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK World", "t.quit() self.threads = [] self.download_done_signal.emit() except Exception as e: if self.logger is not", "except 
Exception as e: resp = None time.sleep(3) requre_count += 1 if resp", "try: self.generate_metadata() count = 0 bboxs = self.cut_bbox() for bbox in bboxs: _count", "\"available\": [ [ { \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0 }", "time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads = [] for i in range(self.thread_num):", "\"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" }", "0 _url = '' access_token = get_access_token(self.token) if access_token is None: return -1", "os import math import logging import requests import time from map_download.cmd.BaseDownloader import DownloadEngine,", "token def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y", "\"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and \"", "metadatas = { \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus", "\"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "thread in self.threads: thread.start() for thread in self.threads: thread.wait() for t in self.threads:", "[] for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db)", "f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e: return", "logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token", "file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0", "self.threads: thread.wait() for t in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except", "4: break try: request_count += 1 param = {'access_token': token} resp = requests.get(url,", "bboxs: while True: if not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads", "© CGIAR-CSI, Produced using Copernicus data and \" \"information funded by the European", "bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng,", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 8191, \"endY\": 4095, \"startX\":", "0, \"startY\": 0 } ], [ { \"endX\": 31, \"endY\": 15, \"startX\": 0,", "try: metadatas = { \"attribution\": \"© Analytical Graphics Inc., © CGIAR-CSI, Produced using", "= os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as", "BoundBox def get_access_token(token): resp = None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while", "class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False):", "\"endY\": 511, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 2047, \"endY\":", "= token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x,", "request_count > 4: break try: 
request_count += 1 param = {'access_token': token} resp", "os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception as e:", "127, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 511, \"endY\": 255,", "True: if request_count > 4: break try: request_count += 1 param = {'access_token':", "[ { \"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\": 0 } ], [", "'access_token': access_token} while True: if requre_count > 4: break try: _url = self.get_url(x,", "% (self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path)", "], [ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 } ],", "in bboxs: _count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count) for bbox in bboxs:", "!= 200: continue break except Exception as e: resp = None time.sleep(3) if", "z): return self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s'", "BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start()", "], [ { \"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\": 0 } ],", "= BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger)", "task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat,", "resp = None time.sleep(3) if resp is None: return None resp_json = resp.json()", "utf-8 -*- # coding=utf-8 import json import os import math import logging import", "thread in self.threads: thread.wait() for t in self.threads: t.stop() t.quit() self.threads = []", "Terrain Premium Tileset, v1.3. 
10m - 30m resolution CONUS, 30m resolution \" \"SRTM", "self.logger is not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count = 0 bboxs", "as e: return -1 self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir", "} ], [ { \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0 }", "file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x,", "1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical", "i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread)", "requre_count += 1 if resp is None: return -1 if resp.status_code != 200:", "{'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if requre_count > 4: break", "90.0 start_zoom = 0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng,", "\"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "CGIAR-CSI, Produced using Copernicus data and \" \"information funded by the European Union", "-1 self._data2DB(x, y, z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def", "max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) +", "[\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\":", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 2047, \"endY\": 1023, \"startX\":", "CONUS, 30m resolution \" \"SRTM between 60N and 60S, 30m Europe. 
Minimum global", "255, \"endY\": 127, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 511,", "> 4: break try: _url = self.get_url(x, y, z) resp = requests.get(_url, params=param,", "logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler()", "math import logging import requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain,", "Exception as e: resp = None time.sleep(3) if resp is None: return None", "min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return", "try: _url = self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True, timeout=2) break", "self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True, timeout=2) break except Exception as", "\"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "\"© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and \" \"information", "= latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1", "requre_count > 4: break try: _url = self.get_url(x, y, z) resp = requests.get(_url,", "16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 32767,", "f: json.dump(metadatas, f) except Exception as e: if self.logger is not None: self.logger.exception(e)", "open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception", "\"endY\": 8191, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 32767, \"endY\":", "max_lng = 180.0 min_lat = -90.0 max_lat = 90.0 start_zoom = 0 end_zoom", "from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count", "\"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox,", "\"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json')", "\"startY\": 0 } ], [ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0, \"startY\":", "= logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO)", "resp is None: return -1 if resp.status_code != 200: return -1 try: with", "-*- # coding=utf-8 import json import os import math import logging import requests", "\"startY\": 0 } ], [ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\":", "= 0 bboxs = self.cut_bbox() for bbox in bboxs: _count = self.get_task_count(bbox) count", "if __name__ == '__main__': if 1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader'", "thread out') except Exception as e: logger.error(e) if 0: accessToken = get_access_token() pass", "None: return -1 if resp.status_code != 200: return -1 try: with open(file_path, 'wb')", "[ { \"endX\": 1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 } ], [", "{ \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 } ], [ {", "= self.get_task_queue(bbox) 
self.threads = [] for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox,", "TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread,", "def get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z) def _download(self, x, y,", "], [ { \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0 } ],", "bbox in bboxs: while True: if not self.running: time.sleep(0.01) else: break task_q =", "d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception", "\"endY\": 255, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 1023, \"endY\":", "1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox, token, thread_num, logger=None,", "= 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root,", "0 } ], [ { \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0", "self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self,", "\"startY\": 0 } ], [ { \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\":", "z, file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox,", "[ [ { \"endX\": 1, \"endY\": 0, \"startX\": 0, \"startY\": 0 } ],", "logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception as e: logger.error(e) if 0:", "resp = None requre_count = 0 _url = '' access_token = get_access_token(self.token) if", "x, y, z): return self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z): file_path", "0, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 3, \"endY\": 1,", "if requre_count > 4: break try: _url = self.get_url(x, y, z) resp =", "+ 1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas = { \"attribution\": \"©", "return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas", "\"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir,", "logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat =", "-*- coding: utf-8 -*- # coding=utf-8 import json import os import math import", "min_lng = -180.0 max_lng = 180.0 min_lat = -90.0 max_lat = 90.0 start_zoom", "], [ { \"endX\": 15, \"endY\": 7, \"startX\": 0, \"startY\": 0 } ],", "0, \"startY\": 0 } ], [ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0,", "param = {'access_token': token} resp = requests.get(url, params=param, timeout=2) if resp.status_code != 200:", "by the European Union - EU-DEM layers\", \"available\": [ [ { \"endX\": 1,", "\"startY\": 0 } ], [ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\":", "task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token =", "requests.get(_url, params=param, stream=True, timeout=2) break except Exception as e: resp = None time.sleep(3)", "e: resp = None time.sleep(3) 
requre_count += 1 if resp is None: return", "3, \"endY\": 1, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 7,", "'w') as f: json.dump(metadatas, f) except Exception as e: if self.logger is not", "self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for thread in self.threads:", "z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path),", "= 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break try:", "_dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w')", "if resp.status_code != 200: return -1 try: with open(file_path, 'wb') as f: for", "{ \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 } ], [ {", "start_zoom = 0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom,", "Tileset, v1.3. 10m - 30m resolution CONUS, 30m resolution \" \"SRTM between 60N", "access_token is None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token}", "0, \"startY\": 0 } ], [ { \"endX\": 255, \"endY\": 127, \"startX\": 0,", "get_access_token(token): resp = None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if", "in self.threads: thread.wait() for t in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit()", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 3, \"endY\": 1, \"startX\":", "\"SRTM between 60N and 60S, 30m Europe. Minimum global coverage of 1000m.\", \"extensions\":", "'v': '1.1.0', 'access_token': access_token} while True: if requre_count > 4: break try: _url", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 4095, \"endY\": 2047, \"startX\":", "return None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def", "= get_access_token(self.token) if access_token is None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v':", "self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads = [] for i in", "= os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas, f) except Exception as", "file_path) return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox, token,", "[\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path =", "self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z) def _download(self,", "latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def", "get_access_token(self.token) if access_token is None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0',", "end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d =", "\"endY\": 15, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 63, \"endY\":", "bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8,", 
"bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng,", "math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas = {", "'1.1.0', 'access_token': access_token} while True: if requre_count > 4: break try: _url =", "\"endY\": 1, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 7, \"endY\":", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 7, \"endY\": 3, \"startX\":", "60S, 30m Europe. Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ],", "e: if self.logger is not None: self.logger.exception(e) def run(self): try: self.generate_metadata() count =", "\"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 32767, \"endY\": 16383, \"startX\":", "resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token,", "z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z)", "} ], [ { \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0 }", "], [ { \"endX\": 127, \"endY\": 63, \"startX\": 0, \"startY\": 0 } ],", "0, \"startY\": 0 } ], [ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0,", "[ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0 } ], [", "} ] ], \"bounds\": [-180, -90, 180, 90, ], \"description\": \"STK World Terrain", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 16383, \"endY\": 8191, \"startX\":", "_count self.division_done_signal.emit(count) for bbox in bboxs: while True: if not self.running: time.sleep(0.01) else:", "except Exception as e: if self.logger is not None: self.logger.error(e) if __name__ ==", "[ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 } ], [", "7, \"endY\": 3, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 15,", "try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root,", "try: request_count += 1 param = {'access_token': token} resp = requests.get(url, params=param, timeout=2)", "logging import requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def", "write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata(", "180.0 min_lat = -90.0 max_lat = 90.0 start_zoom = 0 end_zoom = 5", "1, \"endY\": 0, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 3,", "0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\":", "None requre_count = 0 _url = '' access_token = get_access_token(self.token) if access_token is", "Premium Tileset, v1.3. 
10m - 30m resolution CONUS, 30m resolution \" \"SRTM between", "\"information funded by the European Union - EU-DEM layers\", \"available\": [ [ {", "'' def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger,", "= latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y),", "if resp is None: return -1 if resp.status_code != 200: return -1 try:", "resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir,", "= logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter)", "file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count = 0 _url =", "2047, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 8191, \"endY\": 4095,", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 255, \"endY\": 127, \"startX\":", "write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for thread in self.threads: thread.wait()", "thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token =", "{ \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0 } ], [ {", "], \"description\": \"STK World Terrain Premium Tileset, v1.3. 
10m - 30m resolution CONUS,", "funded by the European Union - EU-DEM layers\", \"available\": [ [ { \"endX\":", "return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q,", "# -*- coding: utf-8 -*- # coding=utf-8 import json import os import math", "True: if requre_count > 4: break try: _url = self.get_url(x, y, z) resp", "root_dir self.token = token def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng,", "z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path):", "z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self): try:", "None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self,", "0 } ], [ { \"endX\": 16383, \"endY\": 8191, \"startX\": 0, \"startY\": 0", "super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain',", "None time.sleep(3) requre_count += 1 if resp is None: return -1 if resp.status_code", "min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main", "self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir,", "thread.start() for thread in self.threads: thread.wait() for t in self.threads: t.stop() t.quit() self.threads", "logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir = root_dir self.token = token", "} ], [ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0 }", "= 0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom)", "'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path, 'w') as f: json.dump(metadatas,", "0, \"startY\": 0 } ], [ { \"endX\": 32767, \"endY\": 16383, \"startX\": 0,", "while True: if not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads =", "else: break task_q = self.get_task_queue(bbox) self.threads = [] for i in range(self.thread_num): thread", "return 1 class TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox, token, thread_num,", "(self.root_dir, 'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return", "map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None request_count =", "0 } ], [ { \"endX\": 2047, \"endY\": 1023, \"startX\": 0, \"startY\": 0", "z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1,", "\"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\":", "+= _count self.division_done_signal.emit(count) for bbox in bboxs: while True: if not self.running: time.sleep(0.01)", "0, \"startY\": 0 } ] ], 
\"bounds\": [-180, -90, 180, 90, ], \"description\":", "for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar)", "30m resolution \" \"SRTM between 60N and 60S, 30m Europe. Minimum global coverage", "coding=utf-8 import json import os import math import logging import requests import time", "TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception as e:", "\"startY\": 0 } ], [ { \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\":", "TerrainDownloadEngine(DownloadEngine): root_dir = '' def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine,", "\"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir =", "time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp = None", "= root_dir self.token = token def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat,", "as f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e:", "end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out') except", "\"endY\": 63, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 255, \"endY\":", "\"startX\": 0, \"startY\": 0 } ] ], \"bounds\": [-180, -90, 180, 90, ],", "1, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 7, \"endY\": 3,", "[ { \"endX\": 4095, \"endY\": 2047, \"startX\": 0, \"startY\": 0 } ], [", "\"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\",", "-90, 180, 90, ], \"description\": \"STK World Terrain Premium Tileset, v1.3. 10m -", "0, \"startY\": 0 } ], [ { \"endX\": 1023, \"endY\": 511, \"startX\": 0,", "write_db=write_db) self.root_dir = root_dir self.token = token def bbox2xyz(self, bbox, z): min_x, min_y", "self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z) def", "Exception as e: if self.logger is not None: self.logger.error(e) if __name__ == '__main__':", "root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' %", "'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as", "between 60N and 60S, 30m Europe. 
Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\",", "0 bboxs = self.cut_bbox() for bbox in bboxs: _count = self.get_task_count(bbox) count +=", "= None request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count >", "8191, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 32767, \"endY\": 16383,", "import math import logging import requests import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread,", "\"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 127, \"endY\": 63, \"startX\":", "\"endY\": 2047, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 8191, \"endY\":", "self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for", "resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e: return -1 self._data2DB(x, y, z,", "time.sleep(3) if resp is None: return None resp_json = resp.json() return resp_json.get('accessToken') class", "15, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 63, \"endY\": 31,", "= {'access_token': token} resp = requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue", "x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True)", "0 } ], [ { \"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\": 0", "is None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while", "Europe. Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\",", "200: return -1 try: with open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024):", "= [] for i in range(self.thread_num): thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger,", "chunk in resp.iter_content(chunk_size=1024): if chunk: f.write(chunk) except Exception as e: return -1 self._data2DB(x,", "resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None,", "-1 try: with open(file_path, 'wb') as f: for chunk in resp.iter_content(chunk_size=1024): if chunk:", "255, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 1023, \"endY\": 511,", "z) resp = requests.get(_url, params=param, stream=True, timeout=2) break except Exception as e: resp", "15, \"endY\": 7, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 31,", "math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self): try: metadatas = { \"attribution\":", "\"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\":", "\"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path", "as e: resp = None time.sleep(3) if resp is None: return None resp_json", "break try: _url = self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True, timeout=2)", "\"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\":", "'%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain') 
if os.path.exists(file_path): self._data2DB(x, y, z,", "0, \"startY\": 0 } ], [ { \"endX\": 8191, \"endY\": 4095, \"startX\": 0,", "bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db')", "latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x)", "\"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\", ], \"version\": \"1.31376.0\" } _dir", "\"endY\": 7, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 31, \"endY\":", "self.token = token def bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z)", "os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file) file_hdlr.setFormatter(formatter) logger.addHandler(file_hdlr) logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng", "break try: request_count += 1 param = {'access_token': token} resp = requests.get(url, params=param,", "\"startY\": 0 } ], [ { \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\":", "import time from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox def get_access_token(token): resp =", "None: return None resp_json = resp.json() return resp_json.get('accessToken') class TerrainDownloaderThread(BaseDownloaderThread): URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\"", "_url = '' access_token = get_access_token(self.token) if access_token is None: return -1 param", "> 4: break try: request_count += 1 param = {'access_token': token} resp =", "os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None requre_count", "-180.0 max_lng = 180.0 min_lat = -90.0 max_lat = 90.0 start_zoom = 0", "Produced using Copernicus data and \" \"information funded by the European Union -", "timeout=2) break except Exception as e: resp = None time.sleep(3) requre_count += 1", "= self.get_url(x, y, z) resp = requests.get(_url, params=param, stream=True, timeout=2) break except Exception", "0, \"startY\": 0 } ], [ { \"endX\": 63, \"endY\": 31, \"startX\": 0,", "World Terrain Premium Tileset, v1.3. 
10m - 30m resolution CONUS, 30m resolution \"", "4095, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 16383, \"endY\": 8191,", "root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log')", "8, logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception as e: logger.error(e) if", "db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat)) def", "bbox.max_lng, z) return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1 def generate_metadata(self):", "self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start() for thread", "for thread in self.threads: thread.wait() for t in self.threads: t.stop() t.quit() self.threads =", "\"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\",", "30m Europe. Minimum global coverage of 1000m.\", \"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\":", "1023, \"endY\": 511, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 2047,", "request_count = 0 url = \"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break", "if self.logger is not None: self.logger.error(e) if __name__ == '__main__': if 1: logger", "], \"version\": \"1.31376.0\" } _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir,", "bbox2xyz(self, bbox, z): min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z) max_x, max_y = latlng2tile_terrain(bbox.max_lat,", "\"endY\": 0, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 3, \"endY\":", "self).__init__( root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db') self.token = token self._init_metadata( format='terrain', bounds='%f,%f,%f,%f'", "\"extensions\": [\"watermask\", \"vertexnormals\", \"octvertexnormals\", ], \"format\": \"quantized-mesh-1.0\", \"maxzoom\": 13, \"minzoom\": 0, \"name\": \"world\",", "json import os import math import logging import requests import time from map_download.cmd.BaseDownloader", "\"startY\": 0 } ], [ { \"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\":", "} ], [ { \"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\": 0 }", "logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file =", "{'access_token': token} resp = requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue break", "} ], [ { \"endX\": 63, \"endY\": 31, \"startX\": 0, \"startY\": 0 }", "63, \"endY\": 31, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 127,", "} ], [ { \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0 }", "-1 if resp.status_code != 200: return -1 try: with open(file_path, 'wb') as f:", "\"https://api.cesium.com/v1/assets/1/endpoint\" while True: if request_count > 4: break try: request_count += 1 param", "params=param, stream=True, timeout=2) break except Exception as e: resp = None time.sleep(3) requre_count", "0 os.makedirs(os.path.dirname(file_path), exist_ok=True) resp = None 
requre_count = 0 _url = '' access_token", "resp = requests.get(url, params=param, timeout=2) if resp.status_code != 200: continue break except Exception", "0 } ], [ { \"endX\": 7, \"endY\": 3, \"startX\": 0, \"startY\": 0", "0 } ], [ { \"endX\": 255, \"endY\": 127, \"startX\": 0, \"startY\": 0", "'Terrain', z, x, y, 'terrain') if os.path.exists(file_path): self._data2DB(x, y, z, file_path) return 0", "max_lng, min_lat, min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000)", "URL = \"https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0\" def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False): super(TerrainDownloaderThread, self).__init__(", "get_url(self, x, y, z): return self.URL.format(x=x, y=y, z=z) def _download(self, x, y, z):", "y, z): file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain') if", "!= 200: return -1 try: with open(file_path, 'wb') as f: for chunk in", "= {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token} while True: if requre_count > 4:", "- EU-DEM layers\", \"available\": [ [ { \"endX\": 1, \"endY\": 0, \"startX\": 0,", "{ \"endX\": 8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 } ], [ {", "Copernicus data and \" \"information funded by the European Union - EU-DEM layers\",", "= -180.0 max_lng = 180.0 min_lat = -90.0 max_lat = 90.0 start_zoom =", "__init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False): super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db) self.root_dir", "7, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 31, \"endY\": 15,", "+= 1 if resp is None: return -1 if resp.status_code != 200: return", "thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in", "= 0 _url = '' access_token = get_access_token(self.token) if access_token is None: return", "0 } ], [ { \"endX\": 3, \"endY\": 1, \"startX\": 0, \"startY\": 0", "8191, \"endY\": 4095, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 16383,", "TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads: thread.start()", "True: if not self.running: time.sleep(0.01) else: break task_q = self.get_task_queue(bbox) self.threads = []", "d.start() time.sleep(10000) logger.error('main thread out') except Exception as e: logger.error(e) if 0: accessToken", "+ 1 def generate_metadata(self): try: metadatas = { \"attribution\": \"© Analytical Graphics Inc.,", "\"endY\": 3, \"startX\": 0, \"startY\": 0 } ], [ { \"endX\": 15, \"endY\":", "__name__ == '__main__': if 1: logger = logging.getLogger('down') try: root = r'/Users/cugxy/Documents/data/downloader' formatter", "logger.addHandler(hdlr) logger.setLevel(logging.INFO) min_lng = -180.0 max_lng = 180.0 min_lat = -90.0 max_lat =", "except Exception as e: return -1 self._data2DB(x, y, z, file_path) return 1 class", "min_lng, start_zoom, end_zoom) d = TerrainDownloadEngine(root, bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread", "run(self): try: self.generate_metadata() count = 0 bboxs = self.cut_bbox() for bbox in bboxs:", "= self.cut_bbox() for bbox in bboxs: 
_count = self.get_task_count(bbox) count += _count self.division_done_signal.emit(count)", "= TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger, write_db=self.write_db) thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar) self.threads.append(thread) for thread in self.threads:", "bbox, 8, logger) d.start() time.sleep(10000) logger.error('main thread out') except Exception as e: logger.error(e)", "16383, \"startX\": 0, \"startY\": 0 } ] ], \"bounds\": [-180, -90, 180, 90,", "self.threads: thread.start() for thread in self.threads: thread.wait() for t in self.threads: t.stop() t.quit()", "return -1 if resp.status_code != 200: return -1 try: with open(file_path, 'wb') as", "if access_token is None: return -1 param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token':", "self.root_dir = root_dir self.token = token def bbox2xyz(self, bbox, z): min_x, min_y =", "for t in self.threads: t.stop() t.quit() self.threads = [] self.download_done_signal.emit() except Exception as", "{ \"endX\": 511, \"endY\": 255, \"startX\": 0, \"startY\": 0 } ], [ {", "} _dir = os.path.join(self.root_dir, 'Terrain') os.makedirs(_dir, exist_ok=True) metadatas_path = os.path.join(_dir, 'layer.json') with open(metadatas_path,", "[ { \"endX\": 31, \"endY\": 15, \"startX\": 0, \"startY\": 0 } ], [", "0 end_zoom = 5 bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom) d", "formatter = logging.Formatter('%(levelname)s-%(message)s') hdlr = logging.StreamHandler() log_file = os.path.join(root, 'down.log') file_hdlr = logging.FileHandler(log_file)", "13, \"minzoom\": 0, \"name\": \"world\", \"projection\": \"EPSG:4326\", \"scheme\": \"tms\", \"tilejson\": \"2.1.0\", \"tiles\": [\"{z}/{x}/{y}.terrain?v={version}\"," ]
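A minimal usage sketch, not part of the recovered file: the demo block above builds the engine as TerrainDownloadEngine(root, bbox, 8, logger), which does not supply the Cesium ion token that __init__(root_dir, bbox, token, thread_num, ...) declares. A call matching the declared signature would look like the following; the token value and output directory are hypothetical placeholders.

# --- usage sketch (hypothetical values) ---
ion_token = 'YOUR_CESIUM_ION_TOKEN'   # placeholder; a real Cesium ion access token is required
out_dir = r'/tmp/terrain_out'         # placeholder output directory
# BoundBox argument order as used in the demo block: max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom
area = BoundBox(90.0, 180.0, -90.0, -180.0, 0, 5)
engine = TerrainDownloadEngine(out_dir, area, ion_token, thread_num=8, logger=logging.getLogger('down'))
engine.start()   # the engine is started with start(), as in the demo block above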
[ "PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag is True: context", "= 1 return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def", "numpages = pdf.numPages return (numpages > 0) except Exception as e: return False", "= open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0 else: flag", "life # If it doesn't download after the timeout period, an exceptions is", "is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url)", "get on with life # If it doesn't download after the timeout period,", "+ filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag =", "if os.stat(filename).st_size == 0: flag = 0 else: flag = 1 return flag", "Select from bs4 import BeautifulSoup import ssl import requests import wget from PyPDF2", "@author: dhingratul \"\"\" import urllib.request import os from selenium import webdriver from selenium.webdriver.support.ui", "import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag is True: context =", "timeout period, an exceptions is thrown, and we try again except requests.exceptions.RequestException as", "as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename = mdir", "r = requests.get(pdf_url, verify=False, timeout=10) break # If it downloads, get out and", "as f: pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages > 0) except", "getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the PDF", "driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" try: with", "python3 # -*- coding: utf-8 -*- \"\"\" Created on Fri Nov 10 23:28:58", "mdir, filename, flag=False): if flag is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url,", "on Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import os", "True: # Keep trying until the webpage successfully downloads try: r = requests.get(pdf_url,", "as myfile: myfile.write(pdf_url + '\\n') filename = mdir + filename with open(filename, 'wb')", "return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir + filename ssl._create_default_https_context", "else: response = urllib.request.urlopen(pdf_url) filename = mdir + filename file = open(filename, 'wb')", "+ '\\n') filename = mdir + filename with open(filename, 'wb') as f: f.write(r.content)", "return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying", "download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until the webpage", "flag = 0 else: flag = 1 file.close() return flag def download_file_R(pdf_url, mdir,", "f: pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages > 0) except Exception", "= requests.get(pdf_url, verify=False, timeout=10) break # If it downloads, get out and get", "'\\n') filename = mdir + filename with open(filename, 'wb') as f: f.write(r.content) if", "requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename =", "import BeautifulSoup import ssl import requests import wget from PyPDF2 import PdfFileReader def", "# If it downloads, get 
out and get on with life # If", "PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag is True: context = ssl._create_unverified_context()", "import wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag", "= ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir", "filename = mdir + filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size ==", "trying until the webpage successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break", "== 0: flag = 0 else: flag = 1 file.close() return flag def", "try: r = requests.get(pdf_url, verify=False, timeout=10) break # If it downloads, get out", "filename with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0: flag =", "ssl import requests import wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename,", "'rb') as f: pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages > 0)", "until the webpage successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break #", "filename, flag=False): filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if", "= webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\"", "f: f.write(r.content) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1", "webpage successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break # If it", "PDF valid \"\"\" try: with open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages", "return driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" try: with open(fn,", "urllib.request.urlopen(pdf_url) filename = mdir + filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size", "import ssl import requests import wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir,", "= PdfFileReader(f) numpages = pdf.numPages return (numpages > 0) except Exception as e:", "<reponame>vahini01/electoral_rolls #!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on Fri Nov", "it doesn't download after the timeout period, an exceptions is thrown, and we", "the timeout period, an exceptions is thrown, and we try again except requests.exceptions.RequestException", "it downloads, get out and get on with life # If it doesn't", "0: flag = 0 else: flag = 1 return flag def getDriver(url): driver", "with open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages", "the webpage successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break # If", "period, an exceptions is thrown, and we try again except requests.exceptions.RequestException as e:", "PdfFileReader(f) numpages = pdf.numPages return (numpages > 0) except Exception as e: return", "1 return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir + filename", "file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0 else:", "mdir + filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag", "after the timeout period, an exceptions is thrown, and we try again except", "verify=False, timeout=10) break # If it downloads, get out and get on with", "mdir + filename 
ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag", "and we try again except requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile:", "If it doesn't download after the timeout period, an exceptions is thrown, and", "from selenium import webdriver from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import", "f.write(r.content) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1 return", "#!/usr/bin/env python3 # -*- coding: utf-8 -*- \"\"\" Created on Fri Nov 10", "filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag = 0", "download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url,", "requests.get(pdf_url, verify=False, timeout=10) break # If it downloads, get out and get on", "return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check", "dhingratul \"\"\" import urllib.request import os from selenium import webdriver from selenium.webdriver.support.ui import", "flag = 1 return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver", "download after the timeout period, an exceptions is thrown, and we try again", "flag=False): filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size", "= 0 else: flag = 1 file.close() return flag def download_file_R(pdf_url, mdir, filename,", "is the PDF valid \"\"\" try: with open(fn, 'rb') as f: pdf =", "Created on Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import", "0 else: flag = 1 return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename", "flag = 0 else: flag = 1 return flag def getDriver(url): driver =", "mdir + filename with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0:", "filename) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1 return", "requests.packages.urllib3.disable_warnings() while True: # Keep trying until the webpage successfully downloads try: r", "successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break # If it downloads,", "response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir + filename", "= mdir + filename with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size ==", "\"\"\" import urllib.request import os from selenium import webdriver from selenium.webdriver.support.ui import Select", "requests import wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if", "os.stat(filename).st_size == 0: flag = 0 else: flag = 1 file.close() return flag", "= 0 else: flag = 1 return flag def getDriver(url): driver = webdriver.Chrome()", "download_file(pdf_url, mdir, filename, flag=False): if flag is True: context = ssl._create_unverified_context() response =", "coding: utf-8 -*- \"\"\" Created on Fri Nov 10 23:28:58 2017 @author: dhingratul", "flag = 0 else: flag = 1 return flag def download_file_W(pdf_url, mdir, filename,", "0: flag = 0 else: flag = 1 return flag def download_file_W(pdf_url, mdir,", "is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" 
try: with open(fn, 'rb') as f:", "= urllib.request.urlopen(pdf_url) filename = mdir + filename file = open(filename, 'wb') file.write(response.read()) if", "Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import os from selenium", "break # If it downloads, get out and get on with life #", "= 0 else: flag = 1 return flag def download_file_W(pdf_url, mdir, filename, flag=False):", "selenium import webdriver from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import ssl", "else: flag = 1 file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings()", "get out and get on with life # If it doesn't download after", "again except requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n')", "e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename = mdir +", "from bs4 import BeautifulSoup import ssl import requests import wget from PyPDF2 import", "flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until", "BeautifulSoup import ssl import requests import wget from PyPDF2 import PdfFileReader def download_file(pdf_url,", "context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir + filename file = open(filename,", "def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" try: with open(fn, 'rb') as", "10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import os from selenium import", "0: flag = 0 else: flag = 1 file.close() return flag def download_file_R(pdf_url,", "doesn't download after the timeout period, an exceptions is thrown, and we try", "on with life # If it doesn't download after the timeout period, an", "import webdriver from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import ssl import", "wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag is", "= 1 return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir +", "= urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir + filename file", "selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import ssl import requests import wget", "\"\"\" Created on Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request", "open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0 else: flag =", "with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0: flag = 0", "else: flag = 1 return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename =", "webdriver from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import ssl import requests", "+ filename with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0: flag", "Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import os from", "def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the", "ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag = 0 else: flag =", "response = urllib.request.urlopen(pdf_url) filename = mdir + filename file = open(filename, 'wb') file.write(response.read())", "file.close() return flag def 
download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep", "while True: # Keep trying until the webpage successfully downloads try: r =", "out and get on with life # If it doesn't download after the", "flag def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir + filename ssl._create_default_https_context =", "-*- coding: utf-8 -*- \"\"\" Created on Fri Nov 10 23:28:58 2017 @author:", "thrown, and we try again except requests.exceptions.RequestException as e: with open(file_out, \"a\") as", "try: with open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages = pdf.numPages return", "23:28:58 2017 @author: dhingratul \"\"\" import urllib.request import os from selenium import webdriver", "if os.stat(filename).st_size == 0: flag = 0 else: flag = 1 file.close() return", "else: flag = 1 return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return", "try again except requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url +", "0 else: flag = 1 return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url)", "as f: f.write(r.content) if os.stat(filename).st_size == 0: flag = 0 else: flag =", "wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1", "'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1", "'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0: flag = 0 else: flag", "with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename = mdir + filename", "# -*- coding: utf-8 -*- \"\"\" Created on Fri Nov 10 23:28:58 2017", "file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0 else: flag = 1 file.close()", "os from selenium import webdriver from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup", "flag = 1 return flag def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir", "mdir, filename, flag=False): filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename)", "# Keep trying until the webpage successfully downloads try: r = requests.get(pdf_url, verify=False,", "open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename = mdir + filename with", "= 1 file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True:", "filename, flag=False): if flag is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context)", "from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False): if flag is True:", "1 file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: #", "filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0: flag = 0", "bs4 import BeautifulSoup import ssl import requests import wget from PyPDF2 import PdfFileReader", "driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid", "exceptions is thrown, and we try again except requests.exceptions.RequestException as e: with open(file_out,", "myfile: myfile.write(pdf_url + '\\n') filename = mdir + filename with open(filename, 'wb') as", "downloads try: r = requests.get(pdf_url, verify=False, timeout=10) break # If it downloads, 
get", "mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until the webpage successfully", "myfile.write(pdf_url + '\\n') filename = mdir + filename with open(filename, 'wb') as f:", "os.stat(filename).st_size == 0: flag = 0 else: flag = 1 return flag def", "+ filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag =", "filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until the webpage successfully downloads", "\"\"\"Check is the PDF valid \"\"\" try: with open(fn, 'rb') as f: pdf", "flag=False): if flag is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else:", "def download_file(pdf_url, mdir, filename, flag=False): if flag is True: context = ssl._create_unverified_context() response", "ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag = 0 else:", "open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size == 0: flag = 0 else:", "urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir + filename file =", "urllib.request import os from selenium import webdriver from selenium.webdriver.support.ui import Select from bs4", "if flag is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response", "== 0: flag = 0 else: flag = 1 return flag def download_file_W(pdf_url,", "Keep trying until the webpage successfully downloads try: r = requests.get(pdf_url, verify=False, timeout=10)", "def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until the", "flag = 1 file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out): requests.packages.urllib3.disable_warnings() while", "we try again except requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url", "ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename = mdir +", "= mdir + filename file = open(filename, 'wb') file.write(response.read()) if os.stat(filename).st_size == 0:", "driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" try: with open(fn, 'rb')", "open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages >", "True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename", "webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is the PDF valid \"\"\" try:", "except requests.exceptions.RequestException as e: with open(file_out, \"a\") as myfile: myfile.write(pdf_url + '\\n') filename", "-*- \"\"\" Created on Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\" import", "= ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0: flag = 0 else: flag", "= mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size == 0:", "flag is True: context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response =", 
"and get on with life # If it doesn't download after the timeout", "\"\"\" try: with open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages = pdf.numPages", "2017 @author: dhingratul \"\"\" import urllib.request import os from selenium import webdriver from", "an exceptions is thrown, and we try again except requests.exceptions.RequestException as e: with", "from selenium.webdriver.support.ui import Select from bs4 import BeautifulSoup import ssl import requests import", "If it downloads, get out and get on with life # If it", "\"a\") as myfile: myfile.write(pdf_url + '\\n') filename = mdir + filename with open(filename,", "downloads, get out and get on with life # If it doesn't download", "timeout=10) break # If it downloads, get out and get on with life", "pdf = PdfFileReader(f) numpages = pdf.numPages return (numpages > 0) except Exception as", "== 0: flag = 0 else: flag = 1 return flag def getDriver(url):", "filename = mdir + filename with open(filename, 'wb') as f: f.write(r.content) if os.stat(filename).st_size", "0 else: flag = 1 file.close() return flag def download_file_R(pdf_url, mdir, filename, file_out):", "def download_file_W(pdf_url, mdir, filename, flag=False): filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context", "1 return flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn):", "filename = mdir + filename ssl._create_default_https_context = ssl._create_unverified_context wget.download(pdf_url, filename) if os.stat(filename).st_size ==", "utf-8 -*- \"\"\" Created on Fri Nov 10 23:28:58 2017 @author: dhingratul \"\"\"", "with life # If it doesn't download after the timeout period, an exceptions", "valid \"\"\" try: with open(fn, 'rb') as f: pdf = PdfFileReader(f) numpages =", "is thrown, and we try again except requests.exceptions.RequestException as e: with open(file_out, \"a\")", "context = ssl._create_unverified_context() response = urllib.request.urlopen(pdf_url, context=context) else: response = urllib.request.urlopen(pdf_url) filename =", "import requests import wget from PyPDF2 import PdfFileReader def download_file(pdf_url, mdir, filename, flag=False):", "import os from selenium import webdriver from selenium.webdriver.support.ui import Select from bs4 import", "# If it doesn't download after the timeout period, an exceptions is thrown,", "the PDF valid \"\"\" try: with open(fn, 'rb') as f: pdf = PdfFileReader(f)", "file_out): requests.packages.urllib3.disable_warnings() while True: # Keep trying until the webpage successfully downloads try:", "import urllib.request import os from selenium import webdriver from selenium.webdriver.support.ui import Select from", "flag def getDriver(url): driver = webdriver.Chrome() driver.get(url) return driver def is_valid_pdf(fn): \"\"\"Check is", "import Select from bs4 import BeautifulSoup import ssl import requests import wget from" ]
[ "confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets", "= [] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS", "data.load_csv_data(confirmed) features = [] targets = [] fig = plt.figure(figsize=(12, 12)) ax =", "= 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/',", "to a given country? Plots similar countries \"\"\" import sys sys.path.insert(0, '..') from", "df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0)", "= data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines", "the most similar trajectory to a given country? Plots similar countries \"\"\" import", "= fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region] for", "'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff =", "dist_diff = json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12))", "in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet')", "over time as a vector, do a simple K-Nearest Neighbor between countries. What", "as a vector, do a simple K-Nearest Neighbor between countries. What country has", "cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES =", "confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases,", "= data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases')", "2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing", "a simple K-Nearest Neighbor between countries. What country has the most similar trajectory", "Treat each province/state in a country cases over time as a vector, do", "a given country? Plots similar countries \"\"\" import sys sys.path.insert(0, '..') from utils", "label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend()", "os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for region, dist in dist_diff.items():", "data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time", "most similar trajectory to a given country? 
Plots similar countries \"\"\" import sys", "= os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets =", "sys.path.insert(0, '..') from utils import data import os import sklearn import numpy as", "dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm =", "'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets = [] fig =", "['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,)", "cases over time as a vector, do a simple K-Nearest Neighbor between countries.", "vector, do a simple K-Nearest Neighbor between countries. What country has the most", "import sklearn import numpy as np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight')", "= other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and {other_region}') plt.savefig(f'results/raw_manhattan/{region}.png') plt.close() print(region)", "as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed", "ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log')", "= os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for region, dist in", "= open(dist_diff,) dist_diff = json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig =", "cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days", "22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*', '')", "import data import os import sklearn import numpy as np import json import", "import os import sklearn import numpy as np import json import matplotlib.pyplot as", "other_region = dist['manhattan'][0] regions = [region, other_region] for val in regions: df =", "\"\"\" import sys sys.path.insert(0, '..') from utils import data import os import sklearn", "= '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed)", "'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets = [] fig = plt.figure(figsize=(12,", "'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff", "has the most similar trajectory to a given country? Plots similar countries \"\"\"", "ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region]", "from utils import data import os import sklearn import numpy as np import", "(days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region", "countries. What country has the most similar trajectory to a given country? 
Plots", "= len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for", "np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH", "time as a vector, do a simple K-Nearest Neighbor between countries. What country", "os import sklearn import numpy as np import json import matplotlib.pyplot as plt", "ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed", "HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv')", "BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed =", "ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout()", "plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions =", "# ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH,", "utils import data import os import sklearn import numpy as np import json", "summary ------------------ Treat each province/state in a country cases over time as a", "'../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features", "= [] targets = [] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm", "between countries. What country has the most similar trajectory to a given country?", "a vector, do a simple K-Nearest Neighbor between countries. What country has the", "Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*',", "fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region] for val", "import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' #", "= data.load_csv_data(confirmed) features = [] targets = [] fig = plt.figure(figsize=(12, 12)) ax", "NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f)", "cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of", "lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22,", "------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed", "for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111)", "What country has the most similar trajectory to a given country? 
Plots similar", "a country cases over time as a vector, do a simple K-Nearest Neighbor", "json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/'", "os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets = []", "in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases", "since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '') other_region =", "len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for region,", "region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and {other_region}')", "= plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions", "cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region] for val in", "ax.legend() plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases", "plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join(", "plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed =", "\"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val)", "country has the most similar trajectory to a given country? Plots similar countries", "do a simple K-Nearest Neighbor between countries. What country has the most similar", "fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0]", "trajectory to a given country? Plots similar countries \"\"\" import sys sys.path.insert(0, '..')", "open(dist_diff,) dist_diff = json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12,", "Neighbor between countries. What country has the most similar trajectory to a given", "12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid',", "= fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted']", "= plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES)", "dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for region, dist", "[] targets = [] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm =", "ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed',", "each province/state in a country cases over time as a vector, do a", "other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and {other_region}') plt.savefig(f'results/raw_manhattan/{region}.png') plt.close()", "similar trajectory to a given country? 
Plots similar countries \"\"\" import sys sys.path.insert(0,", "labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed", "= dist['manhattan'][0] regions = [region, other_region] for val in regions: df = data.filter_by_attribute(", "plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region =", "as np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS -------------", "= plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES", "of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region", "\"\"\" Experiment summary ------------------ Treat each province/state in a country cases over time", "plt.tight_layout() region = region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in", "import numpy as np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------", "plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff", "region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm", "NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff =", "numpy as np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS", "country cases over time as a vector, do a simple K-Nearest Neighbor between", "json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax =", "similar countries \"\"\" import sys sys.path.insert(0, '..') from utils import data import os", "sklearn import numpy as np import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') #", "= cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since", "= ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\")", "'..') from utils import data import os import sklearn import numpy as np", "BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = [] targets = [] fig", "matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------", "0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json')", "= json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax", "LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f", "= region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and", "------------ HYPERPARAMETERS ------------- BASE_PATH = '../COVID-19/csse_covid_19_data/' # ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series',", "given country? 
Plots similar countries \"\"\" import sys sys.path.insert(0, '..') from utils import", "fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0", "fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES = ['solid', 'dashed', 'dotted'] NUM_STYLES", "f = open(dist_diff,) dist_diff = json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight') fig", "cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*',", "# ------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features =", "12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region,", "[region, other_region] for val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases,", "dist['manhattan'][0] regions = [region, other_region] for val in regions: df = data.filter_by_attribute( confirmed,", "other_region] for val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels", "Experiment summary ------------------ Treat each province/state in a country cases over time as", "cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('# of confirmed cases') ax.set_xlabel(\"Time (days since Jan", "import sys sys.path.insert(0, '..') from utils import data import os import sklearn import", "val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df)", "for val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels =", "regions = [region, other_region] for val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\",", "------------------ Treat each province/state in a country cases over time as a vector,", "val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines = ax.plot(cases, label=val) ax.set_ylabel('#", "data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases = cases.sum(axis=0) lines =", "plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS = 0 LINE_STYLES =", "in a country cases over time as a vector, do a simple K-Nearest", "= plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region] for val in regions:", "'knn_raw.json') f = open(dist_diff,) dist_diff = json.load(f) for region, dist in dist_diff.items(): plt.style.use('fivethirtyeight')", "= ['solid', 'dashed', 'dotted'] NUM_STYLES = len(LINE_STYLES) dist_diff = os.path.join('../exp/results/', 'knn_raw.json') f =", "'') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region} and {other_region}') plt.savefig(f'results/raw_manhattan/{region}.png')", "plt.get_cmap('jet') other_region = dist['manhattan'][0] regions = [region, other_region] for val in regions: df", "regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val) cases, labels = data.get_cases_chronologically(df) cases =", "features = [] targets = [] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111)", "confirmed = data.load_csv_data(confirmed) features = [] targets = [] fig = plt.figure(figsize=(12, 12))", "[] fig = plt.figure(figsize=(12, 12)) ax = 
fig.add_subplot(111) cm = plt.get_cmap('jet') NUM_COLORS =", "sys sys.path.insert(0, '..') from utils import data import os import sklearn import numpy", "dist_diff.items(): plt.style.use('fivethirtyeight') fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet') other_region", "<gh_stars>0 \"\"\" Experiment summary ------------------ Treat each province/state in a country cases over", "confirmed cases') ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region =", "data import os import sklearn import numpy as np import json import matplotlib.pyplot", "= [region, other_region] for val in regions: df = data.filter_by_attribute( confirmed, \"Country/Region\", val)", "import json import matplotlib.pyplot as plt plt.style.use('fivethirtyeight') # ------------ HYPERPARAMETERS ------------- BASE_PATH =", "------------------------------------------ confirmed = os.path.join( BASE_PATH, 'csse_covid_19_time_series', 'time_series_covid19_confirmed_global.csv') confirmed = data.load_csv_data(confirmed) features = []", "country? Plots similar countries \"\"\" import sys sys.path.insert(0, '..') from utils import data", "province/state in a country cases over time as a vector, do a simple", "targets = [] fig = plt.figure(figsize=(12, 12)) ax = fig.add_subplot(111) cm = plt.get_cmap('jet')", "region = region.replace('*', '') other_region = other_region.replace('*', '') plt.title(f'Comparing confirmed cases in {region}", "countries \"\"\" import sys sys.path.insert(0, '..') from utils import data import os import", "simple K-Nearest Neighbor between countries. What country has the most similar trajectory to", "K-Nearest Neighbor between countries. What country has the most similar trajectory to a", "ax.set_xlabel(\"Time (days since Jan 22, 2020)\") ax.set_yscale('log') ax.legend() plt.tight_layout() region = region.replace('*', '')", "Plots similar countries \"\"\" import sys sys.path.insert(0, '..') from utils import data import" ]
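The fragments above belong to a script that reads precomputed nearest-neighbour results ('knn_raw.json', Manhattan metric) and plots each country against its closest match. Below is a minimal sketch of how such a ranking could be produced from the same confirmed-case table; the data.* helpers, BASE_PATH, and the CSV name are taken from the fragments, while the output location, the neighbour count, and the assumption that load_csv_data returns a pandas-style frame are hypothetical.

# Hedged sketch: rank countries by Manhattan (L1) distance between their
# summed confirmed-case curves and write a JSON file shaped like the one the
# plotting fragments read ('knn_raw.json'). Helper names come from the
# fragments; paths and the output layout are assumptions.
import json
import os
import numpy as np
from utils import data  # assumed to expose the helpers referenced above

BASE_PATH = '../COVID-19/csse_covid_19_data/'
confirmed = data.load_csv_data(
    os.path.join(BASE_PATH, 'csse_covid_19_time_series',
                 'time_series_covid19_confirmed_global.csv'))

curves = {}
for region in sorted(set(confirmed["Country/Region"])):    # assumed frame-like access
    df = data.filter_by_attribute(confirmed, "Country/Region", region)
    cases, _ = data.get_cases_chronologically(df)
    curves[region] = cases.sum(axis=0)                     # collapse provinces into one curve

results = {}
for region, curve in curves.items():
    # Manhattan distance to every other country's curve, closest first
    ranked = sorted((float(np.abs(curve - other).sum()), name)
                    for name, other in curves.items() if name != region)
    results[region] = {'manhattan': [name for _, name in ranked[:5]]}

with open('results/knn_raw.json', 'w') as f:
    json.dump(results, f, indent=2)

The plotting code then only needs results[region]['manhattan'][0] to pick the most similar country to draw alongside each region.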
[ "* x^2 + ... + b_m * x^{m + 1}|) `C`: Q(x) =", "= gpu() if cuda else cpu() # register and configure weights (numerator and", "denominator (Q). Default ``(5, 4)`` cuda (bool): whether to execute on cuda device.", "on cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE", "w_denominator = mx.nd.array(w_denominator) # register the amount of weights in numerator and denominator,", "+ |b_0 * x| + | b_1 * x^2| + ... + |", "+ a_n * x^n) and `A`: Q(x) = (1 + |b_0 * x|", "Rational activation functions with MXNET networks. \"\"\" import mxnet as mx from mxnet", "external files w_numerator, w_denominator = get_parameters( version, degrees, approx_func) # convert w_numerator and", "CONSIDERED. CUDA GPUS ARE USED WHEN IT IS POSSIBLE version (str): Version of", "b_m * x^m|) `D`: like `B` with noised coefficients b_i Default ``A`` trainable", "with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null',", "version) def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training,", "+ b_1 * x^2 + ... + b_m * x^{m + 1}|) `C`:", "using Learnable Rational activation functions with MXNET networks. \"\"\" import mxnet as mx", "``A`` trainable (bool): Whether the weights are trainable, i.e, if they are updated", "+ b_1 * x + b_2 * x^2 + ... + b_m *", "Q(x) = (1 + |b_0 * x| + | b_1 * x^2| +", "... + b_m * x^m|) `D`: like `B` with noised coefficients b_i Default", "function is trainable, since this information needs to be passed to # version", "w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) # register whether function is trainable,", "def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length,", "HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from", "differentiable=trainable) # register whether function is trainable, since this information needs to be", "this activation function. \"\"\" from rational.numpy import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation,", "whether to execute on cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED.", "initial parameter configuration from external files w_numerator, w_denominator = get_parameters( version, degrees, approx_func)", "Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func", "import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments:", "init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant(", "during backward pass. Default ``True`` Returns: HybridBlock: Rational hybrid block \"\"\" def __init__(self,", "get_parameters( version, degrees, approx_func) # convert w_numerator and w_denominator to mxnet arrays w_numerator", ".get(version) if self.rational_func is None: raise ValueError( \"rational activation function version %s not", "x^2 + ... 
+ a_n * x^n) and `A`: Q(x) = (1 +", "shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) # register whether function", "``True`` Returns: HybridBlock: Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False,", "Default ``A`` trainable (bool): Whether the weights are trainable, i.e, if they are", "'D': _version_d} \\ .get(version) if self.rational_func is None: raise ValueError( \"rational activation function", "_version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \\ .get(version) if self.rational_func is None:", "differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable)", "+ ... + | b_m * x^{m+1}|) `B`: Q(x) = (1 + |b_0", "CUDA GPUS ARE USED WHEN IT IS POSSIBLE version (str): Version of Rational", "None: raise ValueError( \"rational activation function version %s not implemented\" % version) def", "Rational Activation Functions for MXNET ======================================= This module allows you to create Rational", "self.degrees = degrees self.version = version self.init_approximation = approx_func # set specified context", "self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) # register whether", "= version self.init_approximation = approx_func # set specified context (currently not happening, since", "* x^2 + ... + a_n * x^n) and `A`: Q(x) = (1", "+ | b_1 * x^2| + ... + | b_m * x^{m+1}|) `B`:", "= approx_func # set rational activation function version self.rational_func = {'A': _version_a, 'B':", "i.e, if they are updated during backward pass. Default ``True`` Returns: HybridBlock: Rational", "is trainable, since this information needs to be passed to # version D", "{'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \\ .get(version) if self.rational_func is", "degrees (tuple of int): The degrees of the numerator (P) and denominator (Q).", "from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting from", "cuda (bool): whether to execute on cuda device. NOTE: THIS PARAMETER IS CURRENTLY", "needs to be passed to # version D self.training = trainable self.init_approximation =", "Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of the approximated", "The different functions are available in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple of int):", "are trainable, i.e, if they are updated during backward pass. Default ``True`` Returns:", "b_2 * x^2 + ... 
+ b_m * x^m|) `D`: like `B` with", "execution, but are unable to retrieve them at later stages self.numerator_length = len(w_numerator)", "where P(x) = (a_0 + a_1 * x + a_2 * x^2 +", "and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register", "'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null',", "as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator = self.denominator.data().asnumpy().tolist()", "1}|) `C`: Q(x) = (0.1 + |b_0 + b_1 * x + b_2", "+ ... + a_n * x^n) and `A`: Q(x) = (1 + |b_0", "different functions are available in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple of int): The", "self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns a numpy version of this activation function.", "|b_0 + b_1 * x + b_2 * x^2 + ... + b_m", "Functions for MXNET ======================================= This module allows you to create Rational Neural Networks", "x^2 + ... + b_m * x^{m + 1}|) `C`: Q(x) = (0.1", "whether function is trainable, since this information needs to be passed to #", "trainable else 'null', differentiable=trainable) # register whether function is trainable, since this information", "* x^m|) `D`: like `B` with noised coefficients b_i Default ``A`` trainable (bool):", "to be passed to # version D self.training = trainable self.init_approximation = approx_func", "pass. Default ``True`` Returns: HybridBlock: Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5,", "* x^n) and `A`: Q(x) = (1 + |b_0 * x| + |", "`C`: Q(x) = (0.1 + |b_0 + b_1 * x + b_2 *", "and denominator (Q). Default ``(5, 4)`` cuda (bool): whether to execute on cuda", "x| + | b_1 * x^2| + ... + | b_m * x^{m+1}|)", "with MXNET networks. \"\"\" import mxnet as mx from mxnet import initializer from", "during # symbolic execution, but are unable to retrieve them at later stages", "self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees = degrees self.version", "weights (numerator and denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator),", "updated during backward pass. Default ``True`` Returns: HybridBlock: Rational hybrid block \"\"\" def", "int): The degrees of the numerator (P) and denominator (Q). Default ``(5, 4)``", "symbolic execution, but are unable to retrieve them at later stages self.numerator_length =", "inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of the approximated function for", "\"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of", "``(5, 4)`` cuda (bool): whether to execute on cuda device. NOTE: THIS PARAMETER", "(tuple of int): The degrees of the numerator (P) and denominator (Q). 
Default", "from external files w_numerator, w_denominator = get_parameters( version, degrees, approx_func) # convert w_numerator", "(currently not happening, since unclear, how and why helpful) # self.device = gpu()", "trainable self.init_approximation = approx_func # set rational activation function version self.rational_func = {'A':", "trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter configuration from external files w_numerator,", "self.training = trainable self.init_approximation = approx_func # set rational activation function version self.rational_func", "Returns: HybridBlock: Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A',", "(1 + |b_0 * x| + | b_1 * x^2| + ... +", "we need them during # symbolic execution, but are unable to retrieve them", "USED WHEN IT IS POSSIBLE version (str): Version of Rational to use. Rational(x)", "+ a_1 * x + a_2 * x^2 + ... + a_n *", "raise ValueError( \"rational activation function version %s not implemented\" % version) def hybrid_forward(self,", "version %s not implemented\" % version) def hybrid_forward(self, F, x, numerator, denominator): return", "Version of Rational to use. Rational(x) = P(x)/Q(x), where P(x) = (a_0 +", "denominator, since we need them during # symbolic execution, but are unable to", "ARE USED WHEN IT IS POSSIBLE version (str): Version of Rational to use.", "a numpy version of this activation function. \"\"\" from rational.numpy import Rational as", "= self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) # register", "# version D self.training = trainable self.init_approximation = approx_func # set rational activation", "for initialisation. The different functions are available in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple", "P(x) = (a_0 + a_1 * x + a_2 * x^2 + ...", "approx_func # set specified context (currently not happening, since unclear, how and why", "len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees = degrees self.version = version", "* x| + | b_1 * x^2| + ... + | b_m *", "+ ... + b_m * x^{m + 1}|) `C`: Q(x) = (0.1 +", "module allows you to create Rational Neural Networks using Learnable Rational activation functions", "unable to retrieve them at later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator)", "mx from mxnet import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters", "= trainable self.degrees = degrees self.version = version self.init_approximation = approx_func # set", "(P) and denominator (Q). Default ``(5, 4)`` cuda (bool): whether to execute on", "MXNET networks. \"\"\" import mxnet as mx from mxnet import initializer from mxnet.gluon", "self.denominator_length) def numpy(self): \"\"\" Returns a numpy version of this activation function. 
\"\"\"", "parameter configuration from external files w_numerator, w_denominator = get_parameters( version, degrees, approx_func) #", "rational activation function version self.rational_func = {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D':", "# read initial parameter configuration from external files w_numerator, w_denominator = get_parameters( version,", "import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import", "if cuda else cpu() # register and configure weights (numerator and denominator coefficients)", "passed to # version D self.training = trainable self.init_approximation = approx_func # set", "Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs):", "to execute on cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA", "D self.training = trainable self.init_approximation = approx_func # set rational activation function version", "backward pass. Default ``True`` Returns: HybridBlock: Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu',", "_version_d} \\ .get(version) if self.rational_func is None: raise ValueError( \"rational activation function version", "are unable to retrieve them at later stages self.numerator_length = len(w_numerator) self.denominator_length =", "# register whether function is trainable, since this information needs to be passed", "class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str):", "since we need them during # symbolic execution, but are unable to retrieve", "read initial parameter configuration from external files w_numerator, w_denominator = get_parameters( version, degrees,", "self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator", "= {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \\ .get(version) if self.rational_func", "numpy version of this activation function. \"\"\" from rational.numpy import Rational as Rational_numpy", "trainable, i.e, if they are updated during backward pass. Default ``True`` Returns: HybridBlock:", "x + b_1 * x^2 + ... + b_m * x^{m + 1}|)", "configuration from external files w_numerator, w_denominator = get_parameters( version, degrees, approx_func) # convert", "version self.init_approximation = approx_func # set specified context (currently not happening, since unclear,", "CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT IS POSSIBLE version (str):", "<reponame>steven-lang/rational_activations \"\"\" Rational Activation Functions for MXNET ======================================= This module allows you to", "cpu() # register and configure weights (numerator and denominator coefficients) with self.name_scope(): self.numerator", "happening, since unclear, how and why helpful) # self.device = gpu() if cuda", "ValueError( \"rational activation function version %s not implemented\" % version) def hybrid_forward(self, F,", "(1 + |b_0 * x + b_1 * x^2 + ... 
+ b_m", "degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter configuration", "Activation Functions for MXNET ======================================= This module allows you to create Rational Neural", "HybridBlock): \"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name", "MXNET ======================================= This module allows you to create Rational Neural Networks using Learnable", "import get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base", "else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else", "`B`: Q(x) = (1 + |b_0 * x + b_1 * x^2 +", "w_numerator, w_denominator = get_parameters( version, degrees, approx_func) # convert w_numerator and w_denominator to", "= mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the amount of weights in numerator", "them during # symbolic execution, but are unable to retrieve them at later", "use. Rational(x) = P(x)/Q(x), where P(x) = (a_0 + a_1 * x +", "version, degrees, approx_func) # convert w_numerator and w_denominator to mxnet arrays w_numerator =", "set specified context (currently not happening, since unclear, how and why helpful) #", "b_m * x^{m + 1}|) `C`: Q(x) = (0.1 + |b_0 + b_1", "... + a_n * x^n) and `A`: Q(x) = (1 + |b_0 *", "= get_parameters( version, degrees, approx_func) # convert w_numerator and w_denominator to mxnet arrays", "NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT", "init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) # register whether function is", "later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees =", "numerator (P) and denominator (Q). Default ``(5, 4)`` cuda (bool): whether to execute", "to create Rational Neural Networks using Learnable Rational activation functions with MXNET networks.", "POSSIBLE version (str): Version of Rational to use. Rational(x) = P(x)/Q(x), where P(x)", "activation function version self.rational_func = {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d}", "= (0.1 + |b_0 + b_1 * x + b_2 * x^2 +", "information needs to be passed to # version D self.training = trainable self.init_approximation", "_version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational", "trainable (bool): Whether the weights are trainable, i.e, if they are updated during", "b_1 * x^2| + ... + | b_m * x^{m+1}|) `B`: Q(x) =", "(a_0 + a_1 * x + a_2 * x^2 + ... + a_n", "the amount of weights in numerator and denominator, since we need them during", "self.init_approximation = approx_func # set specified context (currently not happening, since unclear, how", "a_2 * x^2 + ... 
+ a_n * x^n) and `A`: Q(x) =", "mxnet as mx from mxnet import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights", "coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else", "retrieve them at later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training =", "get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class", "to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the amount", "_version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation", "version self.rational_func = {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \\ .get(version)", "+ b_2 * x^2 + ... + b_m * x^m|) `D`: like `B`", "(numerator and denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write'", "`D`: like `B` with noised coefficients b_i Default ``A`` trainable (bool): Whether the", "**kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter configuration from external files w_numerator, w_denominator", "import HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d", "b_m * x^{m+1}|) `B`: Q(x) = (1 + |b_0 * x + b_1", "need them during # symbolic execution, but are unable to retrieve them at", "at later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees", "denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable", "a_n * x^n) and `A`: Q(x) = (1 + |b_0 * x| +", "__init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial", "* x^2| + ... + | b_m * x^{m+1}|) `B`: Q(x) = (1", "degrees self.version = version self.init_approximation = approx_func # set specified context (currently not", "since unclear, how and why helpful) # self.device = gpu() if cuda else", "Q(x) = (0.1 + |b_0 + b_1 * x + b_2 * x^2", "\\ .get(version) if self.rational_func is None: raise ValueError( \"rational activation function version %s", "= P(x)/Q(x), where P(x) = (a_0 + a_1 * x + a_2 *", "len(w_denominator) self.training = trainable self.degrees = degrees self.version = version self.init_approximation = approx_func", "F, x, numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length) def", "approximated function for initialisation. The different functions are available in `rational.rationals_config.json`. 
Default: ``leaky_relu``", "# convert w_numerator and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator =", "helpful) # self.device = gpu() if cuda else cpu() # register and configure", "context (currently not happening, since unclear, how and why helpful) # self.device =", "+ b_m * x^m|) `D`: like `B` with noised coefficients b_i Default ``A``", "how and why helpful) # self.device = gpu() if cuda else cpu() #", "initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a,", "self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator',", "since this information needs to be passed to # version D self.training =", "_version_b, 'C': _version_c, 'D': _version_d} \\ .get(version) if self.rational_func is None: raise ValueError(", "self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns a numpy", "`B` with noised coefficients b_i Default ``A`` trainable (bool): Whether the weights are", "Whether the weights are trainable, i.e, if they are updated during backward pass.", "rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``.", "numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\"", "Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator = self.denominator.data().asnumpy().tolist() return", "w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator),", "# set rational activation function version self.rational_func = {'A': _version_a, 'B': _version_b, 'C':", "def numpy(self): \"\"\" Returns a numpy version of this activation function. \"\"\" from", "activation function. \"\"\" from rational.numpy import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees,", "# register the amount of weights in numerator and denominator, since we need", "device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN", "+ |b_0 * x + b_1 * x^2 + ... + b_m *", "(str): Version of Rational to use. Rational(x) = P(x)/Q(x), where P(x) = (a_0", "PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT IS POSSIBLE", "version (str): Version of Rational to use. Rational(x) = P(x)/Q(x), where P(x) =", "mx.nd.array(w_denominator) # register the amount of weights in numerator and denominator, since we", "grad_req='write' if trainable else 'null', differentiable=trainable) # register whether function is trainable, since", "of the numerator (P) and denominator (Q). 
Default ``(5, 4)`` cuda (bool): whether", "trainable self.degrees = degrees self.version = version self.init_approximation = approx_func # set specified", "self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable else 'null', differentiable=trainable) #", "activation functions with MXNET networks. \"\"\" import mxnet as mx from mxnet import", "self.device = gpu() if cuda else cpu() # register and configure weights (numerator", "arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the amount of weights", "`A`: Q(x) = (1 + |b_0 * x| + | b_1 * x^2|", "weights in numerator and denominator, since we need them during # symbolic execution,", "else 'null', differentiable=trainable) # register whether function is trainable, since this information needs", "of the approximated function for initialisation. The different functions are available in `rational.rationals_config.json`.", "grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write'", "if self.rational_func is None: raise ValueError( \"rational activation function version %s not implemented\"", "NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT IS POSSIBLE version (str): Version", "cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read initial parameter configuration from external", "= mx.nd.array(w_denominator) # register the amount of weights in numerator and denominator, since", "= (1 + |b_0 * x| + | b_1 * x^2| + ...", "x^{m + 1}|) `C`: Q(x) = (0.1 + |b_0 + b_1 * x", "to retrieve them at later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training", "mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) # register the amount of", "degrees, approx_func) # convert w_numerator and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator)", "block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs)", "* x + a_2 * x^2 + ... + a_n * x^n) and", "x + b_2 * x^2 + ... + b_m * x^m|) `D`: like", "denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns a numpy version of this", "_version_c, 'D': _version_d} \\ .get(version) if self.rational_func is None: raise ValueError( \"rational activation", "of Rational to use. Rational(x) = P(x)/Q(x), where P(x) = (a_0 + a_1", "Networks using Learnable Rational activation functions with MXNET networks. \"\"\" import mxnet as", "This module allows you to create Rational Neural Networks using Learnable Rational activation", "# symbolic execution, but are unable to retrieve them at later stages self.numerator_length", "to # version D self.training = trainable self.init_approximation = approx_func # set rational", "are available in `rational.rationals_config.json`. 
Default: ``leaky_relu`` degrees (tuple of int): The degrees of", "else cpu() # register and configure weights (numerator and denominator coefficients) with self.name_scope():", "======================================= This module allows you to create Rational Neural Networks using Learnable Rational", "of weights in numerator and denominator, since we need them during # symbolic", "self.init_approximation = approx_func # set rational activation function version self.rational_func = {'A': _version_a,", "self).__init__(**kwargs) # read initial parameter configuration from external files w_numerator, w_denominator = get_parameters(", "from rational.numpy import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator =", "\"\"\" Rational Activation Functions for MXNET ======================================= This module allows you to create", "if trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if", "Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of the", "register the amount of weights in numerator and denominator, since we need them", "``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of the approximated function for initialisation. The", "THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT IS", "as mx from mxnet import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import", "* x + b_2 * x^2 + ... + b_m * x^m|) `D`:", "x^m|) `D`: like `B` with noised coefficients b_i Default ``A`` trainable (bool): Whether", "The name of the approximated function for initialisation. The different functions are available", "b_i Default ``A`` trainable (bool): Whether the weights are trainable, i.e, if they", "this information needs to be passed to # version D self.training = trainable", "= self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable) self.denominator =", "w_numerator and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator = mx.nd.array(w_denominator) #", "stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable self.degrees = degrees", "self.rational_func = {'A': _version_a, 'B': _version_b, 'C': _version_c, 'D': _version_d} \\ .get(version) if", "4)`` cuda (bool): whether to execute on cuda device. NOTE: THIS PARAMETER IS", "Learnable Rational activation functions with MXNET networks. \"\"\" import mxnet as mx from", "implemented\" % version) def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x, numerator,", "return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length) def numpy(self): \"\"\" Returns a", "= len(w_denominator) self.training = trainable self.degrees = degrees self.version = version self.init_approximation =", "function version %s not implemented\" % version) def hybrid_forward(self, F, x, numerator, denominator):", "from mxnet import initializer from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters from", "\"\"\" Returns a numpy version of this activation function. 
\"\"\" from rational.numpy import", "from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base", "|b_0 * x| + | b_1 * x^2| + ... + | b_m", "Q(x) = (1 + |b_0 * x + b_1 * x^2 + ...", "= approx_func # set specified context (currently not happening, since unclear, how and", "not implemented\" % version) def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x,", "self.denominator_length = len(w_denominator) self.training = trainable self.degrees = degrees self.version = version self.init_approximation", "def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True, **kwargs): super(Rational, self).__init__(**kwargs) # read", "= (a_0 + a_1 * x + a_2 * x^2 + ... +", "configure weights (numerator and denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant(", "execute on cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS", "of this activation function. \"\"\" from rational.numpy import Rational as Rational_numpy rational_n =", "and why helpful) # self.device = gpu() if cuda else cpu() # register", "import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator", "Function, inheriting from ``mxnet.gluon.HybridBlock``. Arguments: approx_func (str): The name of the approximated function", "IT IS POSSIBLE version (str): Version of Rational to use. Rational(x) = P(x)/Q(x),", "'C': _version_c, 'D': _version_d} \\ .get(version) if self.rational_func is None: raise ValueError( \"rational", "(0.1 + |b_0 + b_1 * x + b_2 * x^2 + ...", "| b_m * x^{m+1}|) `B`: Q(x) = (1 + |b_0 * x +", "HybridBlock: Rational hybrid block \"\"\" def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False, version='A', trainable=True,", "and configure weights (numerator and denominator coefficients) with self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),),", "\"\"\" import mxnet as mx from mxnet import initializer from mxnet.gluon import HybridBlock", "weights are trainable, i.e, if they are updated during backward pass. Default ``True``", "files w_numerator, w_denominator = get_parameters( version, degrees, approx_func) # convert w_numerator and w_denominator", "approx_func) # convert w_numerator and w_denominator to mxnet arrays w_numerator = mx.nd.array(w_numerator) w_denominator", "b_1 * x + b_2 * x^2 + ... + b_m * x^m|)", "Returns a numpy version of this activation function. \"\"\" from rational.numpy import Rational", "(bool): whether to execute on cuda device. NOTE: THIS PARAMETER IS CURRENTLY NOT", "but are unable to retrieve them at later stages self.numerator_length = len(w_numerator) self.denominator_length", "* x + b_1 * x^2 + ... + b_m * x^{m +", "rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock):", "# self.device = gpu() if cuda else cpu() # register and configure weights", "register whether function is trainable, since this information needs to be passed to", "a_1 * x + a_2 * x^2 + ... + a_n * x^n)", "x^2| + ... + | b_m * x^{m+1}|) `B`: Q(x) = (1 +", "are updated during backward pass. 
Default ``True`` Returns: HybridBlock: Rational hybrid block \"\"\"", "% version) def hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x, numerator, denominator,", "... + | b_m * x^{m+1}|) `B`: Q(x) = (1 + |b_0 *", "and `A`: Q(x) = (1 + |b_0 * x| + | b_1 *", "\"rational activation function version %s not implemented\" % version) def hybrid_forward(self, F, x,", "if they are updated during backward pass. Default ``True`` Returns: HybridBlock: Rational hybrid", "with noised coefficients b_i Default ``A`` trainable (bool): Whether the weights are trainable,", "Arguments: approx_func (str): The name of the approximated function for initialisation. The different", "trainable else 'null', differentiable=trainable) self.denominator = self.params.get(name='w_denominator', shape=(len(w_denominator),), init=initializer.Constant( w_denominator), grad_req='write' if trainable", "= degrees self.version = version self.init_approximation = approx_func # set specified context (currently", "from mxnet.gluon import HybridBlock from rational.utils.get_weights import get_parameters from rational.mxnet.versions import _version_a, _version_b,", "in `rational.rationals_config.json`. Default: ``leaky_relu`` degrees (tuple of int): The degrees of the numerator", "_version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\" Rational Activation Function, inheriting", "self.version = version self.init_approximation = approx_func # set specified context (currently not happening,", "of int): The degrees of the numerator (P) and denominator (Q). Default ``(5,", "(bool): Whether the weights are trainable, i.e, if they are updated during backward", "gpu() if cuda else cpu() # register and configure weights (numerator and denominator", "hybrid_forward(self, F, x, numerator, denominator): return self.rational_func(F, x, numerator, denominator, self.training, self.numerator_length, self.denominator_length)", "Rational to use. Rational(x) = P(x)/Q(x), where P(x) = (a_0 + a_1 *", "numerator and denominator, since we need them during # symbolic execution, but are", "self.name_scope(): self.numerator = self.params.get(name='w_numerator', shape=(len(w_numerator),), init=initializer.Constant( w_numerator), grad_req='write' if trainable else 'null', differentiable=trainable)", "import _version_a, _version_b, _version_c, _version_d from rational._base.rational_base import Rational_base class Rational(Rational_base, HybridBlock): \"\"\"", "+ a_2 * x^2 + ... + a_n * x^n) and `A`: Q(x)", "Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist() rational_n.denominator =", "rational.numpy import Rational as Rational_numpy rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version) rational_n.numerator = self.numerator.data().asnumpy().tolist()", "specified context (currently not happening, since unclear, how and why helpful) # self.device", "self.rational_func is None: raise ValueError( \"rational activation function version %s not implemented\" %", "them at later stages self.numerator_length = len(w_numerator) self.denominator_length = len(w_denominator) self.training = trainable", "cuda else cpu() # register and configure weights (numerator and denominator coefficients) with", "name of the approximated function for initialisation. 
"""
Rational Activation Functions for MXNET
=======================================

This module allows you to create Rational Neural Networks using Learnable
Rational activation functions with MXNET networks.
"""
import mxnet as mx
from mxnet import initializer
from mxnet.gluon import HybridBlock

from rational.utils.get_weights import get_parameters
from rational.mxnet.versions import _version_a, _version_b, _version_c, _version_d
from rational._base.rational_base import Rational_base


class Rational(Rational_base, HybridBlock):
    """
    Rational Activation Function, inheriting from ``mxnet.gluon.HybridBlock``.

    Arguments:
        approx_func (str):
            The name of the approximated function for initialisation.
            The different functions are available in `rational.rationals_config.json`.
            Default: ``leaky_relu``
        degrees (tuple of int):
            The degrees of the numerator (P) and denominator (Q).
            Default ``(5, 4)``
        cuda (bool):
            Whether to execute on cuda device.
            NOTE: THIS PARAMETER IS CURRENTLY NOT CONSIDERED. CUDA GPUS ARE USED WHEN IT IS POSSIBLE
        version (str):
            Version of Rational to use. Rational(x) = P(x)/Q(x), where
            P(x) = (a_0 + a_1 * x + a_2 * x^2 + ... + a_n * x^n) and
            `A`: Q(x) = (1 + |b_0 * x| + |b_1 * x^2| + ... + |b_m * x^{m+1}|)
            `B`: Q(x) = (1 + |b_0 * x + b_1 * x^2 + ... + b_m * x^{m + 1}|)
            `C`: Q(x) = (0.1 + |b_0 + b_1 * x + b_2 * x^2 + ... + b_m * x^m|)
            `D`: like `B` with noised coefficients b_i
            Default ``A``
        trainable (bool):
            Whether the weights are trainable, i.e. if they are updated during the backward pass.
            Default ``True``
    Returns:
        HybridBlock: Rational hybrid block
    """

    def __init__(self, approx_func='leaky_relu', degrees=(5, 4), cuda=False,
                 version='A', trainable=True, **kwargs):
        super(Rational, self).__init__(**kwargs)

        # read initial parameter configuration from external files
        w_numerator, w_denominator = get_parameters(version, degrees, approx_func)

        # convert w_numerator and w_denominator to mxnet arrays
        w_numerator = mx.nd.array(w_numerator)
        w_denominator = mx.nd.array(w_denominator)

        # register the amount of weights in numerator and denominator, since we
        # need them during symbolic execution, but are unable to retrieve them there
        self.numerator_length = len(w_numerator)
        self.denominator_length = len(w_denominator)
        self.training = trainable
        self.degrees = degrees
        self.version = version
        self.init_approximation = approx_func

        # set specified context (currently not happening, since unclear, how and why helpful)
        # self.device = gpu() if cuda else cpu()

        # register and configure weights (numerator and denominator coefficients)
        with self.name_scope():
            self.numerator = self.params.get(name='w_numerator',
                                             shape=(len(w_numerator),),
                                             init=initializer.Constant(w_numerator),
                                             grad_req='write' if trainable else 'null',
                                             differentiable=trainable)
            self.denominator = self.params.get(name='w_denominator',
                                               shape=(len(w_denominator),),
                                               init=initializer.Constant(w_denominator),
                                               grad_req='write' if trainable else 'null',
                                               differentiable=trainable)

        # register whether the function is trainable, since this information
        # needs to be passed to version D
        self.training = trainable
        self.init_approximation = approx_func

        # set rational activation function version
        self.rational_func = {'A': _version_a, 'B': _version_b,
                              'C': _version_c, 'D': _version_d}.get(version)
        if self.rational_func is None:
            raise ValueError("rational activation function version %s not implemented" % version)

    def hybrid_forward(self, F, x, numerator, denominator):
        return self.rational_func(F, x, numerator, denominator, self.training,
                                  self.numerator_length, self.denominator_length)

    def numpy(self):
        """
        Returns a numpy version of this activation function.
        """
        from rational.numpy import Rational as Rational_numpy
        rational_n = Rational_numpy(self.init_approximation, self.degrees, self.version)
        rational_n.numerator = self.numerator.data().asnumpy().tolist()
        rational_n.denominator = self.denominator.data().asnumpy().tolist()
        return rational_n
[ "= torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist =", "return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean", "= dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x,", "y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x", "m, n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy =", "axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) +", "y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1,", "/ denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m,", "\"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist =", "import torch import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num =", "dist = xx + yy - 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt()", "x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num / denom", "+ 1e-12 return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y):", "return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y", "axis, keepdim=True).expand_as(x) + 1e-12 return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def", "Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n)", "yy - 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x,", "= xx + yy - 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return", "euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx = torch.pow(x,", "2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes", "Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist = 2", "\"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12", "cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y, dim=1)", "torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx", "2, axis, keepdim=True).expand_as(x) + 1e-12 return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py", "F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist = 2 - 2 * torch.mm(x,", "- 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y):", "dim=1) y = F.normalize(y, dim=1) dist = 2 - 2 * torch.mm(x, y.t())", "Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0),", "keepdim=True).expand(m, m).t() dist = xx + yy - 2 * torch.matmul(x, y.t()) dist", "* torch.matmul(x, 
y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine", "denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num / denom #", "normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x)", "= torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy - 2 *", "dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y =", "def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y,", "xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist", "keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy", "distance.\"\"\" m, n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy", "n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x,", "import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom", "for criterion.\"\"\" import torch import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\"", "= F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist = 2 - 2 *", "L2-Norm.\"\"\" num = x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return", "y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t()", "1e-12 return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes", "denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n", "x = F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist = 2 - 2", "2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx +", "https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx", "xx + yy - 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist", "# Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n =", "n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy -", "torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num / denom # Source :", "def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx =", "x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m,", "= x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num /", "m).t() dist = xx + yy - 2 * torch.matmul(x, y.t()) dist =", "dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x =", ": https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0)", "torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom =", "2).sum(1, 
keepdim=True).expand(m, m).t() dist = xx + yy - 2 * torch.matmul(x, y.t())", "\"\"\"Utils for criterion.\"\"\" import torch import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs", "= x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n) yy = torch.pow(x, 2).sum(1,", "as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x,", "num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x, y): \"\"\"Computes Euclidean distance.\"\"\"", "dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1)", "= torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num / denom # Source", "criterion.\"\"\" import torch import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num", "keepdim=True).expand_as(x) + 1e-12 return num / denom # Source : https://github.com/earhian/Humpback-Whale-Identification-1st-/blob/master/models/triplet_loss.py def euclidean_dist(x,", "torch import torch.nn.functional as F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x", "y = F.normalize(y, dim=1) dist = 2 - 2 * torch.mm(x, y.t()) return", "num = x denom = torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12 return num", "<filename>torchflare/criterion/utils.py<gh_stars>1-10 \"\"\"Utils for criterion.\"\"\" import torch import torch.nn.functional as F def normalize(x, axis=-1):", "y): \"\"\"Computes Cosine Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist", "torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def cosine_dist(x, y): \"\"\"Computes Cosine Distance.\"\"\"", "\"\"\"Computes Euclidean distance.\"\"\" m, n = x.size(0), y.size(0) xx = torch.pow(x, 2).sum(1, keepdim=True).expand(m,", "def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x, 2, axis,", "torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy - 2 * torch.matmul(x,", "yy = torch.pow(x, 2).sum(1, keepdim=True).expand(m, m).t() dist = xx + yy - 2", "F def normalize(x, axis=-1): \"\"\"Performs L2-Norm.\"\"\" num = x denom = torch.norm(x, 2,", "= F.normalize(y, dim=1) dist = 2 - 2 * torch.mm(x, y.t()) return dist", "+ yy - 2 * torch.matmul(x, y.t()) dist = dist.clamp(min=1e-12).sqrt() return dist def", "Distance.\"\"\" x = F.normalize(x, dim=1) y = F.normalize(y, dim=1) dist = 2 -" ]
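# Usage sketch (not part of the original file): pairwise distances between two
# random embedding batches, using the helpers defined above.
emb_a = torch.randn(4, 128)   # 4 embeddings
emb_b = torch.randn(6, 128)   # 6 embeddings

print(normalize(emb_a).norm(dim=1))        # each row has (approximately) unit L2 norm
print(euclidean_dist(emb_a, emb_b).shape)  # torch.Size([4, 6]) pairwise distances
print(cosine_dist(emb_a, emb_b).shape)     # torch.Size([4, 6]); 0 for aligned vectors, up to 2 for opposite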
[ "for the sbahn_munich integration\"\"\" line_dict = { \"name\": \"S3\", \"color\": \"#333333\", \"text_color\": \"#444444\",", "the sbahn_munich integration\"\"\" line_dict = { \"name\": \"S3\", \"color\": \"#333333\", \"text_color\": \"#444444\", }", "\"\"\"Tests for the sbahn_munich integration\"\"\" line_dict = { \"name\": \"S3\", \"color\": \"#333333\", \"text_color\":" ]
[ "Blueprint from flask import current_app from flask import render_template from flask_login import login_required", "flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required def home():", "flask import render_template from flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\",", "flask import current_app from flask import render_template from flask_login import login_required homestack =", "-*- from flask import Blueprint from flask import current_app from flask import render_template", "current_app from flask import render_template from flask_login import login_required homestack = Blueprint(\"homestack\", __name__,", "from flask import current_app from flask import render_template from flask_login import login_required homestack", "/usr/bin/env python2.7 # -*- coding: latin-1 -*- from flask import Blueprint from flask", "render_template from flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required", "import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required def home(): return", "<reponame>geudrik/hautomation #! /usr/bin/env python2.7 # -*- coding: latin-1 -*- from flask import Blueprint", "#! /usr/bin/env python2.7 # -*- coding: latin-1 -*- from flask import Blueprint from", "login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required def home(): return render_template(\"homestack/home.html\")", "-*- coding: latin-1 -*- from flask import Blueprint from flask import current_app from", "coding: latin-1 -*- from flask import Blueprint from flask import current_app from flask", "import Blueprint from flask import current_app from flask import render_template from flask_login import", "import current_app from flask import render_template from flask_login import login_required homestack = Blueprint(\"homestack\",", "# -*- coding: latin-1 -*- from flask import Blueprint from flask import current_app", "latin-1 -*- from flask import Blueprint from flask import current_app from flask import", "from flask import Blueprint from flask import current_app from flask import render_template from", "from flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"]) @login_required def", "from flask import render_template from flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\")", "python2.7 # -*- coding: latin-1 -*- from flask import Blueprint from flask import", "flask import Blueprint from flask import current_app from flask import render_template from flask_login", "import render_template from flask_login import login_required homestack = Blueprint(\"homestack\", __name__, url_prefix=\"/homestack\") @homestack.route(\"/\", methods=[\"GET\"])" ]
"""Forms for RTD donations"""

import logging

from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

from readthedocs.payments.forms import StripeModelForm, StripeResourceMixin
from readthedocs.payments.utils import stripe

from .models import Supporter

log = logging.getLogger(__name__)


class SupporterForm(StripeResourceMixin, StripeModelForm):

    """Donation support sign up form

    This extends the basic payment form, giving fields for credit card number,
    expiry, and CVV. The proper Knockout data bindings are established on
    :py:class:`StripeModelForm`
    """

    class Meta:
        model = Supporter
        fields = (
            'last_4_digits',
            'name',
            'email',
            'dollars',
            'logo_url',
            'site_url',
            'public',
        )
        labels = {
            'public': _('Make this donation public'),
        }
        help_texts = {
            'public': _('Your name and image will be displayed on the donation page'),
            'email': _('Your email is used for Gravatar and so we can send you a receipt'),
            'logo_url': _("URL of your company's logo, images should be 300x300 pixels or less"),
            'dollars': _('Companies donating over $400 can specify a logo URL and site link'),
        }
        widgets = {
            'dollars': forms.HiddenInput(attrs={'data-bind': 'value: dollars'}),
            'logo_url': forms.TextInput(attrs={'data-bind': 'value: logo_url, enable: urls_enabled'}),
            'site_url': forms.TextInput(attrs={'data-bind': 'value: site_url, enable: urls_enabled'}),
            'last_4_digits': forms.TextInput(attrs={'data-bind': 'valueInit: card_digits, value: card_digits'}),
        }

    last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True)
    name = forms.CharField(required=True)
    email = forms.CharField(required=True)

    def __init__(self, *args, **kwargs):
        self.user = kwargs.pop('user')
        super(SupporterForm, self).__init__(*args, **kwargs)

    def validate_stripe(self):
        """Call stripe for payment (not ideal here) and clean up logo < $200"""
        dollars = self.cleaned_data['dollars']
        if dollars < 200:
            self.cleaned_data['logo_url'] = None
            self.cleaned_data['site_url'] = None
        stripe.Charge.create(
            amount=int(self.cleaned_data['dollars']) * 100,
            currency='usd',
            source=self.cleaned_data['stripe_token'],
            description='Read the Docs Sustained Engineering',
            receipt_email=self.cleaned_data['email']
        )

    def save(self, commit=True):
        supporter = super(SupporterForm, self).save(commit)
        if commit and self.user is not None and self.user.is_authenticated():
            supporter.user = self.user
            supporter.save()
        return supporter


class EthicalAdForm(StripeResourceMixin, StripeModelForm):

    """Payment form for ethical ads

    This extends the basic payment form, giving fields for credit card number,
    expiry, and CVV. The proper Knockout data bindings are established on
    :py:class:`StripeModelForm`
    """

    class Meta:
        model = Supporter
        fields = (
            'last_4_digits',
            'name',
            'email',
            'dollars',
        )
        help_texts = {
            'email': _('Your email is used so we can send you a receipt'),
        }
        widgets = {
            'dollars': forms.HiddenInput(attrs={'data-bind': 'value: dollars'}),
            'last_4_digits': forms.TextInput(attrs={'data-bind': 'valueInit: card_digits, value: card_digits'}),
        }

    last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True)
    name = forms.CharField(required=True)
    email = forms.CharField(required=True)

    def validate_stripe(self):
        stripe.Charge.create(
            amount=int(self.cleaned_data['dollars']) * 100,
            currency='usd',
            source=self.cleaned_data['stripe_token'],
            description='Read the Docs Sponsorship Payment',
            receipt_email=self.cleaned_data['email']
        )
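# Hypothetical view-level usage of SupporterForm (view name, URL name and template
# are assumptions): the form pops a `user` kwarg, so it is passed explicitly.
from django.shortcuts import render, redirect

def donate(request):
    form = SupporterForm(data=request.POST or None, user=request.user)
    if request.method == 'POST' and form.is_valid():
        form.save()  # persists the Supporter and attaches request.user when authenticated
        return redirect('donate')
    return render(request, 'donate/donate.html', {'form': form})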
[ "d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\":", "= { 'q': symbol, 'type': typ, 'output': output, } data = self._get_content(url, params)", "def _get_content(self, url, params): #response = requests.get(url, params=params) response = self.session.get(url, params=params) if", "(tokval == ']')): if (len(result) > 0) and (result[-1][1] == ','): result.pop() #", "\"strike\": \"strike price for this option\" \"vol\": \"the volume of options traded.\" }", "'m': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd':", "fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"'", "token, tokenize def ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration = {u'd':", "as StringIO import logging import traceback import datetime import json import token, tokenize", "many of these are currently being held by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price,", "day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\" d = { 'y': date.year,", "m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ, 'output':", "1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict", "self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All',", "\"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols", "# I think this tells us something about what country where the stock", "del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\",", "_get_dates, to_float, to_int import pandas as pd #from pandas.tseries.frequencies import to_offset from six.moves", "token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval))", "= pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']):", "for this \"oi\": open interest. 
How many of these are currently being held", "== ']')): if (len(result) > 0) and (result[-1][1] == ','): result.pop() # fix", "pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True)", "typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ]", "strings if (tokid == token.NAME): if tokval not in ['true', 'false', 'null', '-Infinity',", "json_decode(json_string): try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return", "date.year, 'm': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON", "('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except:", "date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to fix expecting", "[u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df", "Basically, Stock Symbol + 7 if mini option + date + \"C\" or", "df[col].fillna(0) #d = {} #d[\"options\"] = df #return(d) return(data) def _get_content(self, url, params):", "for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: #", "..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3}", "as pd #from pandas.tseries.frequencies import to_offset from six.moves import cStringIO as StringIO import", "u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas elif (tokid == token.OP)", "'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params)", "typ lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']): params = { 'q':", "See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code. Basically, Stock Symbol + 7", "this function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline)", "= json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from Google", "{'y': 2010, 'm': 1, 'd': 3} \"\"\" d = { 'y': date.year, 'm':", "= [] for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ", "'y': date.year, 'm': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy", "'q': symbol, 'type': typ, 'output': output, } data = self._get_content(url, params) d =", "the stock is traded. \"OPRA\" means USA. \"expiry\": expiration date for this option", "something about what country where the stock is traded. \"OPRA\" means USA. 
\"expiry\":", "google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid, tokval, _,", "== 200: content_json = response.text data = json_decode(content_json) return(data) if __name__ == \"__main__\":", "tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string):", "not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval =", "\"s\": option code. Basically, Stock Symbol + 7 if mini option + date", "if (tokid == token.NAME): if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity',", "= pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0,", "\"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction.", "return(data) def _get_content(self, url, params): #response = requests.get(url, params=params) response = self.session.get(url, params=params)", "(\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def", "expiration date for this option \"name\": I don't know. I have never seen", "self.session.get(url, params=params) if response.status_code == 200: content_json = response.text data = json_decode(content_json) return(data)", "'output': output, } data = self._get_content(url, params) d = {} lst = []", "if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING", "Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi =", "in enumerate(data['expirations']): params = { 'q': symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'],", "pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']): params", "\"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\",", "== '}') or (tokval == ']')): if (len(result) > 0) and (result[-1][1] ==", "fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result =", "#!/usr/bin/env python # -*- coding: utf-8 -*- from .base import DataReaderBase from ..tools", "def ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration = {u'd': 1, u'm':", "== token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') #", "data[typ] for i, expiration in enumerate(data['expirations']): params = { 'q': symbol, 'output': output,", "_, _ in tokengen: # fix unquoted strings if (tokid == token.NAME): if", "= self.session.get(url, params=params) if response.status_code == 200: content_json = response.text data = json_decode(content_json)", "{u'd': 1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014,", "params): #response = requests.get(url, params=params) response = self.session.get(url, params=params) if response.status_code == 200:", "\"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\": 
\"Volume\", \"name\": \"Name\" } df =", "+ \"C\" or \"P\" + price \"strike\": \"strike price for this option\" \"vol\":", "tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string)", "\"\"\" Returns date >>> expiration = {u'd': 1, u'm': 12, u'y': 2014} >>>", "params) for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ)", "for tokid, tokval, _, _, _ in tokengen: # fix unquoted strings if", "down, \"chg\"? \"e\": # I think this tells us something about what country", "\"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict like {'y': ..., 'm':", "'m': 1, 'd': 3} \"\"\" d = { 'y': date.year, 'm': date.month, 'd':", "3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns", "((tokval == '}') or (tokval == ']')): if (len(result) > 0) and (result[-1][1]", "us something about what country where the stock is traded. \"OPRA\" means USA.", "single-quoted strings elif (tokid == token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' %", "def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params =", "== token.NAME): if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid", "{'y': ..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm':", "options traded.\" } \"\"\" for col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']:", "date for this option \"name\": I don't know. I have never seen a", "df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols", "\"OPRA\" means USA. \"expiry\": expiration date for this option \"name\": I don't know.", "def date_to_ymd(date): \"\"\" Returns dict like {'y': ..., 'm': ..., 'd': ...} >>>", "lazy JSON - to fix expecting property name this function fixes the json", "http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name, *args,", "datetime import json import token, tokenize def ymd_to_date(y, m, d): \"\"\" Returns date", "[] for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ)", "= {u'd': 1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>>", "\"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction. \"chg\" = up, \"chr\"", "% tokval # fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith (\"'\"):", ">>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\" d =", "where the stock is traded. \"OPRA\" means USA. 
\"expiry\": expiration date for this", "\"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\":", "== ','): result.pop() # fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith", "def fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to fix expecting property name this", "if (len(result) > 0) and (result[-1][1] == ','): result.pop() # fix single-quoted strings", "del data[typ] for i, expiration in enumerate(data['expirations']): params = { 'q': symbol, 'output':", "JSON - to fix expecting property name this function fixes the json output", "df #return(d) return(data) def _get_content(self, url, params): #response = requests.get(url, params=params) response =", "tokengen: # fix unquoted strings if (tokid == token.NAME): if tokval not in", "code. Basically, Stock Symbol + 7 if mini option + date + \"C\"", "df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i]", "for col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options']", "\"\"\" d = { 'y': date.year, 'm': date.month, 'd': date.day } return(d) def", "up, \"chr\" = down, \"chg\"? \"e\": # I think this tells us something", "price for this option\" \"vol\": \"the volume of options traded.\" } \"\"\" for", "if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return", "= ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in", "open interest. How many of these are currently being held by others. See,", "'d': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to fix", "\"\"\" DataReader to fetch data from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api", "of options traded.\" } \"\"\" for col in ['Ask', 'Bid', 'c', 'cp', 'Last',", "\"\"\" Handle lazy JSON - to fix expecting property name this function fixes", "this tells us something about what country where the stock is traded. 
\"OPRA\"", "content_json = response.text data = json_decode(content_json) return(data) if __name__ == \"__main__\": import doctest", "expecting property name this function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\"", "datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict like", "[] for tokid, tokval, _, _, _ in tokengen: # fix unquoted strings", "= { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\"", "ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from Google Finance Options see", "fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data", "df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for", "and (result[-1][1] == ','): result.pop() # fix single-quoted strings elif (tokid == token.STRING):", "traceback import datetime import json import token, tokenize def ymd_to_date(y, m, d): \"\"\"", "0) and (result[-1][1] == ','): result.pop() # fix single-quoted strings elif (tokid ==", "tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret =", "'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params) for typ", "= json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase):", "traded. \"OPRA\" means USA. \"expiry\": expiration date for this option \"name\": I don't", "2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1)", "import token, tokenize def ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration =", "% tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas elif (tokid == token.OP) and", "['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval = u'\"%s\"' %", "{ \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\":", "'Last', 'Strike']: df[col] = df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col] =", "1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict like {'y': ...,", "1, 'd': 3} \"\"\" d = { 'y': date.year, 'm': date.month, 'd': date.day", "\"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction. \"chg\" = up, \"chr\" =", "held by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code. 
Basically, Stock", "12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d))", "see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def", "from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs):", "m, d): \"\"\" Returns date >>> expiration = {u'd': 1, u'm': 12, u'y':", "these are currently being held by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\":", "# remove invalid commas elif (tokid == token.OP) and ((tokval == '}') or", "= df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] =", "['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id']", "'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params", "from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid, tokval,", "url = \"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ, 'output': output, }", "try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret", "{} #d[\"options\"] = df #return(d) return(data) def _get_content(self, url, params): #response = requests.get(url,", "data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] =", "fix expecting property name this function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name", "option + date + \"C\" or \"P\" + price \"strike\": \"strike price for", "\"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols =", "name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12',", "\"cp\": \"cp\" \"cs\": change direction. \"chg\" = up, \"chr\" = down, \"chg\"? 
\"e\":", "*args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'):", "= { 'q': symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], }", "tokval # fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith (\"'\"): tokval", "symbol, typ='All', output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params = { 'q':", "json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\"", "in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for", "the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = []", "[u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i,", "return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url =", "} data = self._get_content(url, params) d = {} lst = [] for typ", "= u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas elif (tokid ==", "\"cp\" \"cs\": change direction. \"chg\" = up, \"chr\" = down, \"chg\"? \"e\": #", "\"oi\": open interest. How many of these are currently being held by others.", "2010, 'm': 1, 'd': 3} \"\"\" d = { 'y': date.year, 'm': date.month,", "token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove", "\"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ, 'output': output, } data =", "date + \"C\" or \"P\" + price \"strike\": \"strike price for this option\"", "data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col", "others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code. Basically, Stock Symbol +", "or (tokval == ']')): if (len(result) > 0) and (result[-1][1] == ','): result.pop()", ">>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\"", "date_to_ymd(date): \"\"\" Returns dict like {'y': ..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010,", "= up, \"chr\" = down, \"chg\"? \"e\": # I think this tells us", "d): \"\"\" Returns date >>> expiration = {u'd': 1, u'm': 12, u'y': 2014}", "tokval, _, _, _ in tokengen: # fix unquoted strings if (tokid ==", "params = { 'q': symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'],", "to fetch data from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def", "= self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol,", "typ='All', output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params = { 'q': symbol,", "to fix expecting property name this function fixes the json output from google", "don't know. 
I have never seen a value for this \"oi\": open interest.", "\"chr\" = down, \"chg\"? \"e\": # I think this tells us something about", "about what country where the stock is traded. \"OPRA\" means USA. \"expiry\": expiration", "lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\":", "tells us something about what country where the stock is traded. \"OPRA\" means", "= df[col].fillna(0) #d = {} #d[\"options\"] = df #return(d) return(data) def _get_content(self, url,", "name this function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen =", "to_float, to_int import pandas as pd #from pandas.tseries.frequencies import to_offset from six.moves import", "token.OP) and ((tokval == '}') or (tokval == ']')): if (len(result) > 0)", "']')): if (len(result) > 0) and (result[-1][1] == ','): result.pop() # fix single-quoted", "\"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction. \"chg\" =", "is traded. \"OPRA\" means USA. \"expiry\": expiration date for this option \"name\": I", "\"Bid\", \"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" } df", "1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3,", "y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ,", "direction. \"chg\" = up, \"chr\" = down, \"chg\"? \"e\": # I think this", "in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df", "df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst,", "this option\" \"vol\": \"the volume of options traded.\" } \"\"\" for col in", "tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval", "typ, 'output': output, } data = self._get_content(url, params) d = {} lst =", "\"Ask\", \"b\": \"Bid\", \"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\"", "+ price \"strike\": \"strike price for this option\" \"vol\": \"the volume of options", "'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for col in ['Volume', 'oi',", "\"expiry\": expiration date for this option \"name\": I don't know. I have never", "tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas elif (tokid == token.OP) and ((tokval", "df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity", "= down, \"chg\"? 
\"e\": # I think this tells us something about what", "except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader", "= u'\"%s\"' % tokval # fix single-quoted strings elif (tokid == token.STRING): if", "unquoted strings if (tokid == token.NAME): if tokval not in ['true', 'false', 'null',", "https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self,", "= token.STRING tokval = u'\"%s\"' % tokval # fix single-quoted strings elif (tokid", "tokval = u'\"%s\"' % tokval # fix single-quoted strings elif (tokid == token.STRING):", "} data = self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ])", "I have never seen a value for this \"oi\": open interest. How many", "return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to fix expecting property name", "output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid,", "DataReader to fetch data from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\"", "u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3,", "import COL, _get_dates, to_float, to_int import pandas as pd #from pandas.tseries.frequencies import to_offset", "i, expiration in enumerate(data['expirations']): params = { 'q': symbol, 'output': output, 'expy': expiration['y'],", "this option \"name\": I don't know. I have never seen a value for", "_, _, _ in tokengen: # fix unquoted strings if (tokid == token.NAME):", "class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from Google Finance Options see https://www.google.com/finance/option_chain", "\"name\": I don't know. I have never seen a value for this \"oi\":", "#response = requests.get(url, params=params) response = self.session.get(url, params=params) if response.status_code == 200: content_json", "= { 'y': date.year, 'm': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\"", "'NaN']: tokid = token.STRING tokval = u'\"%s\"' % tokval # fix single-quoted strings", "def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json',", "= pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\", \"b\": \"Bid\", \"p\": \"Last\",", "'d': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\"", "fix unquoted strings if (tokid == token.NAME): if tokval not in ['true', 'false',", "are currently being held by others. 
See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option", "output, } data = self._get_content(url, params) d = {} lst = [] for", "if response.status_code == 200: content_json = response.text data = json_decode(content_json) return(data) if __name__", "'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url = \"https://www.google.com/finance/option_chain\"", "ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration = {u'd': 1, u'm': 12,", "coding: utf-8 -*- from .base import DataReaderBase from ..tools import COL, _get_dates, to_float,", "or \"P\" + price \"strike\": \"strike price for this option\" \"vol\": \"the volume", "col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] =", "(tokid == token.NAME): if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']:", "in tokengen: # fix unquoted strings if (tokid == token.NAME): if tokval not", "= u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try:", "= df #return(d) return(data) def _get_content(self, url, params): #response = requests.get(url, params=params) response", "date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\" d = {", "from .base import DataReaderBase from ..tools import COL, _get_dates, to_float, to_int import pandas", "'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval = u'\"%s\"' % tokval", "for col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for", "utf-8 -*- from .base import DataReaderBase from ..tools import COL, _get_dates, to_float, to_int", "'\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except: json_string", ">>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date):", "six.moves import cStringIO as StringIO import logging import traceback import datetime import json", "tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string)", "return tokenize.untokenize(result) def json_decode(json_string): try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret", "in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for col in", "u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ) df =", "if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid", "200: content_json = response.text data = json_decode(content_json) return(data) if __name__ == \"__main__\": import", "lst = [] for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] =", "pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\", \"b\": \"Bid\", \"p\": \"Last\", \"strike\":", "'m': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON -", "(tokid == token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"')", "ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret class", "stock is traded. 
\"OPRA\" means USA. \"expiry\": expiration date for this option \"name\":", "ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from", "in ['Volume']: # df[col] = df[col].fillna(0) #d = {} #d[\"options\"] = df #return(d)", "'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval = u'\"%s\"' % tokval #", "I think this tells us something about what country where the stock is", "#from pandas.tseries.frequencies import to_offset from six.moves import cStringIO as StringIO import logging import", "\"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for tokid, tokval, _, _, _", "in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid = token.STRING tokval = u'\"%s\"'", "json import token, tokenize def ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration", "return(datetime.date(year=y, month=m, day=d)) def date_to_ymd(date): \"\"\" Returns dict like {'y': ..., 'm': ...,", ">>> expiration = {u'd': 1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12,", "json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result = [] for", "df[col] = df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry']", "tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas", "= { \"a\": \"Ask\", \"b\": \"Bid\", \"p\": \"Last\", \"strike\": \"Strike\", \"expiry\": \"Expiry\", \"vol\":", "last \"s\": option code. Basically, Stock Symbol + 7 if mini option +", "params=params) if response.status_code == 200: content_json = response.text data = json_decode(content_json) return(data) if", "datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m,", "in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] lst.append(df_typ)", "from six.moves import cStringIO as StringIO import logging import traceback import datetime import", "import cStringIO as StringIO import logging import traceback import datetime import json import", "what country where the stock is traded. \"OPRA\" means USA. \"expiry\": expiration date", "Stock Symbol + 7 if mini option + date + \"C\" or \"P\"", "traded.\" } \"\"\" for col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col]", "commas elif (tokid == token.OP) and ((tokval == '}') or (tokval == ']')):", "u'\"%s\"' % tokval # fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith", "col in ['Volume']: # df[col] = df[col].fillna(0) #d = {} #d[\"options\"] = df", "'type': typ, 'output': output, } data = self._get_content(url, params) d = {} lst", "expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params) for typ in [u'puts', u'calls']:", "\"vol\": \"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\":", "== token.OP) and ((tokval == '}') or (tokval == ']')): if (len(result) >", "change direction. \"chg\" = up, \"chr\" = down, \"chg\"? 
\"e\": # I think", "['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for col in ['Volume',", "invalid commas elif (tokid == token.OP) and ((tokval == '}') or (tokval ==", "12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014,", "DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain", "month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\" d = { 'y':", "= self._get_content(url, params) d = {} lst = [] for typ in [u'puts',", "result = [] for tokid, tokval, _, _, _ in tokengen: # fix", "i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: # df[col]", "have never seen a value for this \"oi\": open interest. How many of", "token.STRING tokval = u'\"%s\"' % tokval # fix single-quoted strings elif (tokid ==", "# fix unquoted strings if (tokid == token.NAME): if tokval not in ['true',", "{ 'y': date.year, 'm': date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle", "value for this \"oi\": open interest. How many of these are currently being", "{ 'q': symbol, 'type': typ, 'output': output, } data = self._get_content(url, params) d", ".base import DataReaderBase from ..tools import COL, _get_dates, to_float, to_int import pandas as", "pd #from pandas.tseries.frequencies import to_offset from six.moves import cStringIO as StringIO import logging", "'q': symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data =", "I don't know. I have never seen a value for this \"oi\": open", "= \"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ, 'output': output, } data", "remove invalid commas elif (tokid == token.OP) and ((tokval == '}') or (tokval", "lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\", \"b\": \"Bid\",", "function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen = tokenize.generate_tokens(StringIO(in_text).readline) result", "enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: # df[col] = df[col].fillna(0) #d", "#d = {} #d[\"options\"] = df #return(d) return(data) def _get_content(self, url, params): #response", "elif (tokid == token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"',", "dict like {'y': ..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y':", "currently being held by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code.", "\"P\" + price \"strike\": \"strike price for this option\" \"vol\": \"the volume of", "\"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\",", "\"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction. 
\"chg\" = up,", "..tools import COL, _get_dates, to_float, to_int import pandas as pd #from pandas.tseries.frequencies import", "json_string = fix_lazy_json(json_string) ret = json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to", "\"e\": # I think this tells us something about what country where the", "data = self._get_content(url, params) d = {} lst = [] for typ in", "StringIO import logging import traceback import datetime import json import token, tokenize def", "seen a value for this \"oi\": open interest. How many of these are", "option code. Basically, Stock Symbol + 7 if mini option + date +", "','): result.pop() # fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith (\"'\"):", "self._get_content(url, params) d = {} lst = [] for typ in [u'puts', u'calls']:", "fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to fix expecting property name this function", "\"identity code\", \"cp\": \"cp\" \"cs\": change direction. \"chg\" = up, \"chr\" = down,", "symbol, 'type': typ, 'output': output, } data = self._get_content(url, params) d = {}", "# df[col] = df[col].fillna(0) #d = {} #d[\"options\"] = df #return(d) return(data) def", "\"the volume of options traded.\" } \"\"\" for col in ['Ask', 'Bid', 'c',", "_ in tokengen: # fix unquoted strings if (tokid == token.NAME): if tokval", "\"vol\": \"the volume of options traded.\" } \"\"\" for col in ['Ask', 'Bid',", "know. I have never seen a value for this \"oi\": open interest. How", "} df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\":", "and ((tokval == '}') or (tokval == ']')): if (len(result) > 0) and", "col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']: df[col] = df[col].map(to_float) for col", "date >>> expiration = {u'd': 1, u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014,", "= ymd_to_date(**expiration) #for col in ['Volume']: # df[col] = df[col].fillna(0) #d = {}", "import logging import traceback import datetime import json import token, tokenize def ymd_to_date(y,", "tokenize def ymd_to_date(y, m, d): \"\"\" Returns date >>> expiration = {u'd': 1,", "self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ", "Symbol + 7 if mini option + date + \"C\" or \"P\" +", "\"C\" or \"P\" + price \"strike\": \"strike price for this option\" \"vol\": \"the", "python # -*- coding: utf-8 -*- from .base import DataReaderBase from ..tools import", "Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict", "logging import traceback import datetime import json import token, tokenize def ymd_to_date(y, m,", "import traceback import datetime import json import token, tokenize def ymd_to_date(y, m, d):", "USA. \"expiry\": expiration date for this option \"name\": I don't know. 
I have", "from ..tools import COL, _get_dates, to_float, to_int import pandas as pd #from pandas.tseries.frequencies", "#return(d) return(data) def _get_content(self, url, params): #response = requests.get(url, params=params) response = self.session.get(url,", "('\"', '\\\\\"') # remove invalid commas elif (tokid == token.OP) and ((tokval ==", "if mini option + date + \"C\" or \"P\" + price \"strike\": \"strike", "> 0) and (result[-1][1] == ','): result.pop() # fix single-quoted strings elif (tokid", "a value for this \"oi\": open interest. How many of these are currently", "expiration['d'], } data = self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ =", "3} \"\"\" d = { 'y': date.year, 'm': date.month, 'd': date.day } return(d)", "expiration in enumerate(data['expirations']): params = { 'q': symbol, 'output': output, 'expy': expiration['y'], 'expm':", "**kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def", "token.NAME): if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']: tokid =", "COL, _get_dates, to_float, to_int import pandas as pd #from pandas.tseries.frequencies import to_offset from", "== token.STRING): if tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid,", "\"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\",", "# -*- coding: utf-8 -*- from .base import DataReaderBase from ..tools import COL,", "= pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i,", "#d[\"options\"] = df #return(d) return(data) def _get_content(self, url, params): #response = requests.get(url, params=params)", "{} lst = [] for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type']", "+ 7 if mini option + date + \"C\" or \"P\" + price", "'Strike']: df[col] = df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int)", "'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id'])", "df[col].map(to_float) for col in ['Volume', 'oi', 'cid']: df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry'])", "by others. See, http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code. Basically, Stock Symbol", "for this option \"name\": I don't know. I have never seen a value", "requests.get(url, params=params) response = self.session.get(url, params=params) if response.status_code == 200: content_json = response.text", "= typ lst.append(df_typ) del data[typ] lst.append(df_typ) df = pd.concat(lst, axis=0, ignore_index=True) d_cols =", "date.month, 'd': date.day } return(d) def fix_lazy_json(in_text): \"\"\" Handle lazy JSON - to", "http://www.investopedia.com/terms/o/openinterest.asp \"p\": price, last \"s\": option code. 
Basically, Stock Symbol + 7 if", "DataReaderBase from ..tools import COL, _get_dates, to_float, to_int import pandas as pd #from", "pandas as pd #from pandas.tseries.frequencies import to_offset from six.moves import cStringIO as StringIO", "tokval.startswith (\"'\"): tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result)", "params=params) response = self.session.get(url, params=params) if response.status_code == 200: content_json = response.text data", "d='1'): url = \"https://www.google.com/finance/option_chain\" params = { 'q': symbol, 'type': typ, 'output': output,", "\"p\": price, last \"s\": option code. Basically, Stock Symbol + 7 if mini", "= {} lst = [] for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ])", "_get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014',", "\"cs\": change direction. \"chg\" = up, \"chr\" = down, \"chg\"? \"e\": # I", "u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i, expiration", "df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']): params =", "df[col] = df[col].map(to_int) df['Expiry'] = pd.to_datetime(df['Expiry']) data['options'] = df data['underlying_id'] = int(data['underlying_id']) data['expiry']", "*args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name, *args, **kwargs): return(self._get_one_raw(name, 'All', 'json'))", "Returns dict like {'y': ..., 'm': ..., 'd': ...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3))", "to_offset from six.moves import cStringIO as StringIO import logging import traceback import datetime", "def json_decode(json_string): try: ret = json.loads(json_string) except: json_string = fix_lazy_json(json_string) ret = json.loads(json_string)", "\"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change", "ymd_to_date(**expiration) #for col in ['Volume']: # df[col] = df[col].fillna(0) #d = {} #d[\"options\"]", "import pandas as pd #from pandas.tseries.frequencies import to_offset from six.moves import cStringIO as", "= requests.get(url, params=params) response = self.session.get(url, params=params) if response.status_code == 200: content_json =", "u'm': 12, u'y': 2014} >>> ymd_to_date(**expiration) datetime.date(2014, 12, 1) >>> ymd_to_date(2014, 3, 1)", "1) >>> ymd_to_date(2014, 3, 1) datetime.date(2014, 3, 1) \"\"\" return(datetime.date(year=y, month=m, day=d)) def", "Google Finance Options see https://www.google.com/finance/option_chain https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi", "option\" \"vol\": \"the volume of options traded.\" } \"\"\" for col in ['Ask',", "property name this function fixes the json output from google http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name \"\"\" tokengen", "'expd': expiration['d'], } data = self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ", "...} >>> date_to_ymd(datetime.date(year=2010, month=1, day=3)) {'y': 2010, 'm': 1, 'd': 3} \"\"\" d", "data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: # df[col] = df[col].fillna(0) #d =", "'}') or (tokval == ']')): if (len(result) > 0) and 
(result[-1][1] == ','):", "params) d = {} lst = [] for typ in [u'puts', u'calls']: df_typ", "think this tells us something about what country where the stock is traded.", "= df data['underlying_id'] = int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']):", "\"b\": \"bid\", \"c\": \"change\", \"cid\": \"identity code\", \"cp\": \"cp\" \"cs\": change direction. \"chg\"", "elif (tokid == token.OP) and ((tokval == '}') or (tokval == ']')): if", "tokid, tokval, _, _, _ in tokengen: # fix unquoted strings if (tokid", "json.loads(json_string) return ret class DataReaderGoogleFinanceOptions(DataReaderBase): \"\"\" DataReader to fetch data from Google Finance", "output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params) for", "for i, expiration in enumerate(data['expirations']): params = { 'q': symbol, 'output': output, 'expy':", "tokval = u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') # remove invalid commas elif (tokid", "expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']: # df[col] =", "option \"name\": I don't know. I have never seen a value for this", "df = df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\",", "= df.rename(columns=d_cols) \"\"\" d_cols = { \"a\": \"ask\", \"b\": \"bid\", \"c\": \"change\", \"cid\":", "int(data['underlying_id']) data['expiry'] = ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for", "params = { 'q': symbol, 'type': typ, 'output': output, } data = self._get_content(url,", "df_typ = pd.DataFrame(data[typ]) df_typ['Type'] = typ lst.append(df_typ) del data[typ] for i, expiration in", "How many of these are currently being held by others. 
See, http://www.investopedia.com/terms/o/openinterest.asp \"p\":", "import json import token, tokenize def ymd_to_date(y, m, d): \"\"\" Returns date >>>", "**kwargs): return(self._get_one_raw(name, 'All', 'json')) def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'): url", "tokid = token.STRING tokval = u'\"%s\"' % tokval # fix single-quoted strings elif", "result.pop() # fix single-quoted strings elif (tokid == token.STRING): if tokval.startswith (\"'\"): tokval", "'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params) for typ in [u'puts',", "= typ lst.append(df_typ) del data[typ] for i, expiration in enumerate(data['expirations']): params = {", "expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url, params) for typ in", "df = pd.concat(lst, axis=0, ignore_index=True) d_cols = { \"a\": \"Ask\", \"b\": \"Bid\", \"p\":", "symbol, 'output': output, 'expy': expiration['y'], 'expm': expiration['m'], 'expd': expiration['d'], } data = self._get_content(url,", "u'\"%s\"' % tokval[1:-1].replace ('\"', '\\\\\"') result.append((tokid, tokval)) return tokenize.untokenize(result) def json_decode(json_string): try: ret", "https://github.com/makmac213/python-google-option-chain http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api \"\"\" def init(self, *args, **kwargs): self._get_multi = self._get_multi_todict def _get_one(self, name,", "ymd_to_date(**data['expiry']) for i, expiration in enumerate(data['expirations']): data['expirations'][i] = ymd_to_date(**expiration) #for col in ['Volume']:", "df[col] = df[col].fillna(0) #d = {} #d[\"options\"] = df #return(d) return(data) def _get_content(self,", "\"Expiry\", \"vol\": \"Volume\", \"name\": \"Name\" } df = df.rename(columns=d_cols) \"\"\" d_cols = {", "= self._get_content(url, params) for typ in [u'puts', u'calls']: df_typ = pd.DataFrame(data[typ]) df_typ['Type'] =", "+ date + \"C\" or \"P\" + price \"strike\": \"strike price for this", "cStringIO as StringIO import logging import traceback import datetime import json import token,", "of these are currently being held by others. 
# -*- coding: utf-8 -*-

from .base import DataReaderBase
from ..tools import COL, _get_dates, to_float, to_int
import pandas as pd
#from pandas.tseries.frequencies import to_offset
from six.moves import cStringIO as StringIO
import logging
import traceback
import datetime
import json
import token, tokenize


def ymd_to_date(y, m, d):
    """
    Returns date

    >>> expiration = {u'd': 1, u'm': 12, u'y': 2014}
    >>> ymd_to_date(**expiration)
    datetime.date(2014, 12, 1)

    >>> ymd_to_date(2014, 3, 1)
    datetime.date(2014, 3, 1)
    """
    return(datetime.date(year=y, month=m, day=d))


def date_to_ymd(date):
    """
    Returns dict like {'y': ..., 'm': ..., 'd': ...}

    >>> date_to_ymd(datetime.date(year=2010, month=1, day=3))
    {'y': 2010, 'm': 1, 'd': 3}
    """
    d = {
        'y': date.year,
        'm': date.month,
        'd': date.day
    }
    return(d)


def fix_lazy_json(in_text):
    """
    Handle lazy JSON - to fix expecting property name
    this function fixes the json output from
    http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name
    """
    tokengen = tokenize.generate_tokens(StringIO(in_text).readline)

    result = []
    for tokid, tokval, _, _, _ in tokengen:
        # fix unquoted strings
        if (tokid == token.NAME):
            if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']:
                tokid = token.STRING
                tokval = u'"%s"' % tokval

        # fix single-quoted strings
        elif (tokid == token.STRING):
            if tokval.startswith("'"):
                tokval = u'"%s"' % tokval[1:-1].replace('"', '\\"')

        # remove invalid commas
        elif (tokid == token.OP) and ((tokval == '}') or (tokval == ']')):
            if (len(result) > 0) and (result[-1][1] == ','):
                result.pop()

        result.append((tokid, tokval))

    return tokenize.untokenize(result)


def json_decode(json_string):
    try:
        ret = json.loads(json_string)
    except:
        json_string = fix_lazy_json(json_string)
        ret = json.loads(json_string)
    return ret


class DataReaderGoogleFinanceOptions(DataReaderBase):
    """
    DataReader to fetch data from Google Finance Options

    see
    https://www.google.com/finance/option_chain
    https://github.com/makmac213/python-google-option-chain
    http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api
    """
    def init(self, *args, **kwargs):
        self._get_multi = self._get_multi_todict

    def _get_one(self, name, *args, **kwargs):
        return(self._get_one_raw(name, 'All', 'json'))

    def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'):
        url = "https://www.google.com/finance/option_chain"
        params = {
            'q': symbol,
            'type': typ,
            'output': output,
        }
        data = self._get_content(url, params)

        d = {}
        lst = []
        for typ in [u'puts', u'calls']:
            df_typ = pd.DataFrame(data[typ])
            df_typ['Type'] = typ
            lst.append(df_typ)
            del data[typ]

        for i, expiration in enumerate(data['expirations']):
            params = {
                'q': symbol,
                'output': output,
                'expy': expiration['y'],
                'expm': expiration['m'],
                'expd': expiration['d'],
            }
            data = self._get_content(url, params)
            for typ in [u'puts', u'calls']:
                df_typ = pd.DataFrame(data[typ])
                df_typ['Type'] = typ
                lst.append(df_typ)
                del data[typ]
            lst.append(df_typ)

        df = pd.concat(lst, axis=0, ignore_index=True)

        d_cols = {
            "a": "Ask",
            "b": "Bid",
            "p": "Last",
            "strike": "Strike",
            "expiry": "Expiry",
            "vol": "Volume",
            "name": "Name"
        }
        df = df.rename(columns=d_cols)

        """
        d_cols = {
            "a": "Ask",
            "b": "Bid",
            "cid": "Identity code",
            "cp": "cp"
            "cs": change direction. "chg" = up, "chr" = down, "chg"?
            "e": # I think this tells us something about what country where the stock is traded. "OPRA" means USA.
            "expiry": expiration date for this option
            "name": I don't know. I have never seen a value for this
            "oi": open interest. How many of these are currently being held by others.
                  See, http://www.investopedia.com/terms/o/openinterest.asp
            "p": price, last
            "s": option code.
                 Basically, Stock Symbol + 7 if mini option + date + "C" or "P" + price
            "strike": "strike price for this option"
            "vol": "the volume of options traded."
        }
        """

        for col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']:
            df[col] = df[col].map(to_float)

        for col in ['Volume', 'oi', 'cid']:
            df[col] = df[col].map(to_int)

        df['Expiry'] = pd.to_datetime(df['Expiry'])

        data['options'] = df

        data['underlying_id'] = int(data['underlying_id'])
        data['expiry'] = ymd_to_date(**data['expiry'])

        for i, expiration in enumerate(data['expirations']):
            data['expirations'][i] = ymd_to_date(**expiration)

        #for col in ['Volume']:
        #    df[col] = df[col].fillna(0)

        #d = {}
        #d["options"] = df
        #return(d)

        return(data)

    def _get_content(self, url, params):
        #response = requests.get(url, params=params)
        response = self.session.get(url, params=params)
        if response.status_code == 200:
            content_json = response.text
            data = json_decode(content_json)
            return(data)


if __name__ == "__main__":
    import doctest
    doctest.testmod()
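# --- Usage sketch (added for illustration; not part of the original module) ---
# A minimal demo of the lazy-JSON repair path above.  The input string is an
# invented example of the kind of "lazy" JSON the Google endpoint returned
# (unquoted keys, single quotes, trailing comma); it is not real API output.
def _demo_lazy_json():
    lazy = "{expiry: {y: 2014, m: 12, d: 1}, 'underlying_id': 12345,}"
    data = json_decode(lazy)  # json.loads fails first, then fix_lazy_json repairs it
    assert data['underlying_id'] == 12345
    assert ymd_to_date(**data['expiry']) == datetime.date(2014, 12, 1)
    return data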
[ "np import os class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\" def __init__(self): self.l2i_i2l", "= np.array(x), np.array(y) cnt = 0 yield (x_all, y_) x, y =[], []", "len_all += 1 if len_all > 1: # 第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org", "keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json'", "\"\"\" def __init__(self): self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self,", "np.array([x[1] for x in x_]) x_all = [x_1, x_2] elif embedding_type == 'xlnet':", "np.array([x[0][0] for x in x_]) x_2 = np.array([x[1][0] for x in x_]) x_3", "elif embedding_type == 'xlnet': x_, y_ = x, np.array(y) x_1 = np.array([x[0][0] for", "cout_all_line: break for line in file_csv: cout_all_line += 1 if cout_all_line > 1:", "@time : 2019/11/2 21:08 # @author : Mo # @function: from keras_textclassification.data_preprocess.text_preprocess import", "os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l = self.l2i_i2l['i2l'] for i in range(len(pred)): pred_i2l[i2l[str(i)]] =", "load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql = int(rate * len_all) if len_ql <= 500: #", "in x_]) x_2 = np.array([x[1] for x in x_]) x_all = [x_1, x_2]", "+ 'l2i_i2l.json' import numpy as np import os class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式,", "else label que_embed = embed.sentence2idx(ques) label_zeros = [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1", "encoding=\"utf-8\") for line in file_csv: len_all += 1 if len_all > 1: #", "Mo # @function: from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index", "os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l", "= {} for label_one in label_set: label2index[label_one] = count index2label[count] = label_one count", "1: # 第一条是标签'label,ques',不选择 x_line, y_line = process_line(line) x.append(x_line) y.append(y_line) cnt += 1 if", "= np.array(x), np.array(y) x_1 = np.array([x[0] for x in x_]) x_2 = np.array([x[1]", "x_3] else: x_all, y_ = np.array(x), np.array(y) cnt = 0 yield (x_all, y_)", "None\") def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set = set() len_all = 0 file_csv", "from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir +", "-*- coding: utf-8 -*- # @time : 2019/11/2 21:08 # @author : Mo", "= 0 cnt = 0 x, y = [], [] # 跳出循环 if", "label_real = \"NAN\" if label_org==\"\" else label_org label_set.add(label_real) file_csv.close() return label_set, len_all def", "= \"NAN\" if label == \"\" else label que_embed = embed.sentence2idx(ques) label_zeros =", "que_embed = embed.sentence2idx(ques) label_zeros = [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed,", "0 file_csv = open(path, \"r\", encoding=\"utf-8\") for line in file_csv: len_all += 1", "in file_csv: cout_all_line += 1 if cout_all_line > 1: # 第一条是标签'label,ques',不选择 x_line, y_line", "for line in file_csv: cout_all_line += 1 if cout_all_line > 1: # 
第一条是标签'label,ques',不选择", "if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {}", "embedding_type == 'xlnet': x_, y_ = x, np.array(y) x_1 = np.array([x[0][0] for x", "+ 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json' import numpy as np import os", "label_set = set() len_all = 0 file_csv = open(path, \"r\", encoding=\"utf-8\") for line", "{} i2l = self.l2i_i2l['i2l'] for i in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank =", "'albert']: x_, y_ = np.array(x), np.array(y) x_1 = np.array([x[0] for x in x_])", "= 1 return que_embed, label_zeros while True: file_csv = open(path, \"r\", encoding=\"utf-8\") cout_all_line", "['bert', 'albert']: x_, y_ = np.array(x), np.array(y) x_1 = np.array([x[0] for x in", "{} for label_one in label_set: label2index[label_one] = count index2label[count] = label_one count =", "{} l2i = self.l2i_i2l['l2i'] for i in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank =", "21:08 # @author : Mo # @function: from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from", "l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) #", "# -*- coding: utf-8 -*- # @time : 2019/11/2 21:08 # @author :", "label_one in label_set: label2index[label_one] = count index2label[count] = label_one count = count +", "return label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1): label_set, len_all =", "1 return que_embed, label_zeros while True: file_csv = open(path, \"r\", encoding=\"utf-8\") cout_all_line =", "que_embed, label_zeros while True: file_csv = open(path, \"r\", encoding=\"utf-8\") cout_all_line = 0 cnt", "for x in x_]) x_3 = np.array([x[2][0] for x in x_]) x_all =", "[0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed, label_zeros while True: file_csv =", "l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql = int(rate * len_all) if len_ql <=", "os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index = {} index2label = {} for label_one in", "for i in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1],", "if cnt == batch_size: if embedding_type in ['bert', 'albert']: x_, y_ = np.array(x),", "class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\" def __init__(self): self.l2i_i2l = None if", "对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques = str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label = \"NAN\"", "label_set, len_all = self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not os.path.exists(path_fast_text_model_l2i_i2l): count =", "while True: file_csv = open(path, \"r\", encoding=\"utf-8\") cout_all_line = 0 cnt = 0", "keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json'", "[label,ques] \"\"\" def __init__(self): self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def", "else: raise RuntimeError(\"path_fast_text_model_label2index 
is None\") def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set = set()", "= open(path, \"r\", encoding=\"utf-8\") cout_all_line = 0 cnt = 0 x, y =", "= l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)] return pred_l2i_rank else: raise", "= count + 1 l2i_i2l = {} l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] = index2label", "= open(path, \"r\", encoding=\"utf-8\") for line in file_csv: len_all += 1 if len_all", "pred_l2i = {} l2i = self.l2i_i2l['l2i'] for i in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]]", "import load_json, save_json from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l", "process_line(line) x.append(x_line) y.append(y_line) cnt += 1 if cnt == batch_size: if embedding_type in", "embedding_type in ['bert', 'albert']: x_, y_ = np.array(x), np.array(y) x_1 = np.array([x[0] for", "x_]) x_3 = np.array([x[2][0] for x in x_]) x_all = [x_1, x_2, x_3]", "for x in x_]) x_all = [x_1, x_2] elif embedding_type == 'xlnet': x_,", "label_zeros while True: file_csv = open(path, \"r\", encoding=\"utf-8\") cout_all_line = 0 cnt =", "= str(line_sp[0]).strip().upper() label_real = \"NAN\" if label_org==\"\" else label_org label_set.add(label_real) file_csv.close() return label_set,", "= pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)] return pred_i2l_rank else: raise", "line in file_csv: cout_all_line += 1 if cout_all_line > 1: # 第一条是标签'label,ques',不选择 x_line,", "l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)] return pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index", "y = [], [] # 跳出循环 if len_ql < cout_all_line: break for line", "cnt = 0 x, y = [], [] # 跳出循环 if len_ql <", "l2i_i2l = {} l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l", "else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {}", "x_all = [x_1, x_2, x_3] else: x_all, y_ = np.array(x), np.array(y) cnt =", "!/usr/bin/python # -*- coding: utf-8 -*- # @time : 2019/11/2 21:08 # @author", "# 跳出循环 if len_ql < cout_all_line: break for line in file_csv: cout_all_line +=", "self.l2i_i2l['l2i'] for i in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k:", "= np.array([x[1] for x in x_]) x_all = [x_1, x_2] elif embedding_type ==", "in x_]) x_all = [x_1, x_2] elif embedding_type == 'xlnet': x_, y_ =", "if len_all > 1: # 第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real", "pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)] return pred_l2i_rank else:", "= embed.sentence2idx(ques) label_zeros = [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed, label_zeros", "= int(rate * len_all) if len_ql <= 500: # sample时候不生效,使得语料足够训练 len_ql = len_all", "True: file_csv = open(path, \"r\", encoding=\"utf-8\") cout_all_line = 0 cnt = 0 x,", "self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index =", "= x, np.array(y) x_1 = np.array([x[0][0] for x in x_]) x_2 = np.array([x[1][0]", 
"load_json, save_json from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l =", "label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql", "x_all = [x_1, x_2] elif embedding_type == 'xlnet': x_, y_ = x, np.array(y)", "== batch_size: if embedding_type in ['bert', 'albert']: x_, y_ = np.array(x), np.array(y) x_1", "cout_all_line = 0 cnt = 0 x, y = [], [] # 跳出循环", "np.array([x[0] for x in x_]) x_2 = np.array([x[1] for x in x_]) x_all", "batch_size, path, embed, rate=1): label_set, len_all = self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if", "= [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)] return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\")", "# !/usr/bin/python # -*- coding: utf-8 -*- # @time : 2019/11/2 21:08 #", "path_model_dir path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json' import numpy", "if label_org==\"\" else label_org label_set.add(label_real) file_csv.close() return label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size,", "= 0 file_csv = open(path, \"r\", encoding=\"utf-8\") for line in file_csv: len_all +=", "def process_line(line): # 对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques = str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper()", "@function: from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index = path_model_dir", "= {} i2l = self.l2i_i2l['i2l'] for i in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank", "# 读取数据的比例 len_ql = int(rate * len_all) if len_ql <= 500: # sample时候不生效,使得语料足够训练", "pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)] return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index", "= str(line_sp[0]).strip().upper() label = \"NAN\" if label == \"\" else label que_embed =", "@author : Mo # @function: from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from keras_textclassification.conf.path_config import", "def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1): label_set, len_all = self.preprocess_get_label_set(path) # 获取label转index字典等,", "x_]) x_all = [x_1, x_2] elif embedding_type == 'xlnet': x_, y_ = x,", "line_sp = line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real = \"NAN\" if label_org==\"\" else label_org", "{} index2label = {} for label_one in label_set: label2index[label_one] = count index2label[count] =", "首先获取label,set,即存在的具体类 label_set = set() len_all = 0 file_csv = open(path, \"r\", encoding=\"utf-8\") for", "None\") def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i = self.l2i_i2l['l2i'] for", "x_, y_ = np.array(x), np.array(y) x_1 = np.array([x[0] for x in x_]) x_2", "raise RuntimeError(\"path_fast_text_model_label2index is None\") def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set = set() len_all", "np.array([x[1][0] for x in x_]) x_3 = np.array([x[2][0] for x in x_]) x_all", "'xlnet': x_, y_ = x, np.array(y) x_1 = 
np.array([x[0][0] for x in x_])", "1 if cout_all_line > 1: # 第一条是标签'label,ques',不选择 x_line, y_line = process_line(line) x.append(x_line) y.append(y_line)", "[sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)] return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def", "process_line(line): # 对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques = str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label", "\"NAN\" if label_org==\"\" else label_org label_set.add(label_real) file_csv.close() return label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type,", "= {} index2label = {} for label_one in label_set: label2index[label_one] = count index2label[count]", "x_]) x_2 = np.array([x[1][0] for x in x_]) x_3 = np.array([x[2][0] for x", "+= 1 if cnt == batch_size: if embedding_type in ['bert', 'albert']: x_, y_", "for label_one in label_set: label2index[label_one] = count index2label[count] = label_one count = count", "l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql =", "0 cnt = 0 x, y = [], [] # 跳出循环 if len_ql", "第一条是标签'label,ques',不选择 x_line, y_line = process_line(line) x.append(x_line) y.append(y_line) cnt += 1 if cnt ==", "line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real = \"NAN\" if label_org==\"\" else label_org label_set.add(label_real) file_csv.close()", "key=lambda k: k[1], reverse=True)] return pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def preprocess_get_label_set(self,", "x_1 = np.array([x[0] for x in x_]) x_2 = np.array([x[1] for x in", "str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label = \"NAN\" if label == \"\" else label", "pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)] return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is", "set() len_all = 0 file_csv = open(path, \"r\", encoding=\"utf-8\") for line in file_csv:", "label = str(line_sp[0]).strip().upper() label = \"NAN\" if label == \"\" else label que_embed", "for line in file_csv: len_all += 1 if len_all > 1: # 第一条是标签'label,ques',不选择", "in x_]) x_all = [x_1, x_2, x_3] else: x_all, y_ = np.array(x), np.array(y)", "= str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label = \"NAN\" if label == \"\" else", "[] # 跳出循环 if len_ql < cout_all_line: break for line in file_csv: cout_all_line", "0 label2index = {} index2label = {} for label_one in label_set: label2index[label_one] =", "line in file_csv: len_all += 1 if len_all > 1: # 第一条是标签'label,ques',不选择 line_sp", "np.array(y) cnt = 0 yield (x_all, y_) x, y =[], [] file_csv.close() print(\"preprocess_label_ques_to_idx", "= load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l = self.l2i_i2l['i2l']", "if len_ql < cout_all_line: break for line in file_csv: cout_all_line += 1 if", "if embedding_type in ['bert', 'albert']: x_, y_ = np.array(x), np.array(y) x_1 = np.array([x[0]", "pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i = self.l2i_i2l['l2i'] for i in range(len(pred)):", "cout_all_line > 1: # 第一条是标签'label,ques',不选择 x_line, y_line = process_line(line) x.append(x_line) y.append(y_line) cnt +=", "# 第一条是标签'label,ques',不选择 x_line, y_line = process_line(line) x.append(x_line) y.append(y_line) cnt += 1 if cnt", "k[1], 
reverse=True)] return pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def preprocess_get_label_set(self, path): #", "label = \"NAN\" if label == \"\" else label que_embed = embed.sentence2idx(ques) label_zeros", "if cout_all_line > 1: # 第一条是标签'label,ques',不选择 x_line, y_line = process_line(line) x.append(x_line) y.append(y_line) cnt", "if label == \"\" else label que_embed = embed.sentence2idx(ques) label_zeros = [0] *", "读取数据的比例 len_ql = int(rate * len_all) if len_ql <= 500: # sample时候不生效,使得语料足够训练 len_ql", "in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)] return", "for x in x_]) x_2 = np.array([x[1] for x in x_]) x_all =", "sample时候不生效,使得语料足够训练 len_ql = len_all def process_line(line): # 对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques =", "y.append(y_line) cnt += 1 if cnt == batch_size: if embedding_type in ['bert', 'albert']:", "len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed, label_zeros while True: file_csv = open(path, \"r\",", "# @author : Mo # @function: from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from keras_textclassification.conf.path_config", "index2label[count] = label_one count = count + 1 l2i_i2l = {} l2i_i2l['l2i'] =", "y_line = process_line(line) x.append(x_line) y.append(y_line) cnt += 1 if cnt == batch_size: if", "= np.array([x[2][0] for x in x_]) x_all = [x_1, x_2, x_3] else: x_all,", "= self.l2i_i2l['i2l'] for i in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda", "x in x_]) x_all = [x_1, x_2, x_3] else: x_all, y_ = np.array(x),", "numpy as np import os class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\" def", "i in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)]", "file_csv = open(path, \"r\", encoding=\"utf-8\") cout_all_line = 0 cnt = 0 x, y", "self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l =", "<reponame>Vail-qin/Keras-TextClassification # !/usr/bin/python # -*- coding: utf-8 -*- # @time : 2019/11/2 21:08", "x in x_]) x_all = [x_1, x_2] elif embedding_type == 'xlnet': x_, y_", "= [], [] # 跳出循环 if len_ql < cout_all_line: break for line in", "label_one count = count + 1 l2i_i2l = {} l2i_i2l['l2i'] = label2index l2i_i2l['i2l']", "k: k[1], reverse=True)] return pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def preprocess_get_label_set(self, path):", "label_org = str(line_sp[0]).strip().upper() label_real = \"NAN\" if label_org==\"\" else label_org label_set.add(label_real) file_csv.close() return", "is None\") def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i = self.l2i_i2l['l2i']", "第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real = \"NAN\" if label_org==\"\" else", "cnt += 1 if cnt == batch_size: if embedding_type in ['bert', 'albert']: x_,", "dev验证集合的时候用 if not os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index = {} index2label = {}", "np.array(y) x_1 = np.array([x[0] for x in x_]) x_2 = np.array([x[1] for x", "rate=1): label_set, len_all = self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not 
os.path.exists(path_fast_text_model_l2i_i2l): count", "def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set = set() len_all = 0 file_csv =", "1 if cnt == batch_size: if embedding_type in ['bert', 'albert']: x_, y_ =", "pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i =", "i2l = self.l2i_i2l['i2l'] for i in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank = [sorted(pred_i2l.items(),", "x.append(x_line) y.append(y_line) cnt += 1 if cnt == batch_size: if embedding_type in ['bert',", "len_all = self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not os.path.exists(path_fast_text_model_l2i_i2l): count = 0", "= count index2label[count] = label_one count = count + 1 l2i_i2l = {}", "= 0 label2index = {} index2label = {} for label_one in label_set: label2index[label_one]", "count + 1 l2i_i2l = {} l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l,", "np.array([x[2][0] for x in x_]) x_all = [x_1, x_2, x_3] else: x_all, y_", "500: # sample时候不生效,使得语料足够训练 len_ql = len_all def process_line(line): # 对每一条数据操作,获取label和问句index line_sp = line.split(\",\")", "# 首先获取label,set,即存在的具体类 label_set = set() len_all = 0 file_csv = open(path, \"r\", encoding=\"utf-8\")", "np.array(x), np.array(y) x_1 = np.array([x[0] for x in x_]) x_2 = np.array([x[1] for", "= \"NAN\" if label_org==\"\" else label_org label_set.add(label_real) file_csv.close() return label_set, len_all def preprocess_label_ques_to_idx(self,", "y_ = np.array(x), np.array(y) x_1 = np.array([x[0] for x in x_]) x_2 =", "x_2 = np.array([x[1][0] for x in x_]) x_3 = np.array([x[2][0] for x in", "path, embed, rate=1): label_set, len_all = self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not", "self.l2i_i2l['i2l'] for i in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k:", "{} l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l)", "= np.array([x[1][0] for x in x_]) x_3 = np.array([x[2][0] for x in x_])", "import numpy as np import os class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\"", "is None\") def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set = set() len_all = 0", "return pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set", "x in x_]) x_2 = np.array([x[1][0] for x in x_]) x_3 = np.array([x[2][0]", "else label_org label_set.add(label_real) file_csv.close() return label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed,", "= index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql = int(rate", "key=lambda k: k[1], reverse=True)] return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self,", "label2index = {} index2label = {} for label_one in label_set: label2index[label_one] = count", "x_3 = np.array([x[2][0] for x in x_]) x_all = [x_1, x_2, x_3] else:", "pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)] return pred_l2i_rank else: raise 
RuntimeError(\"path_fast_text_model_label2index is", "open(path, \"r\", encoding=\"utf-8\") for line in file_csv: len_all += 1 if len_all >", "PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\" def __init__(self): self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l):", "# @time : 2019/11/2 21:08 # @author : Mo # @function: from keras_textclassification.data_preprocess.text_preprocess", "= [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)] return pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\")", "k: k[1], reverse=True)] return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self, pred):", "= np.array([x[0][0] for x in x_]) x_2 = np.array([x[1][0] for x in x_])", "in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)] return", "x_]) x_2 = np.array([x[1] for x in x_]) x_all = [x_1, x_2] elif", "path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql = int(rate * len_all) if", "from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index = path_model_dir +", "in x_]) x_3 = np.array([x[2][0] for x in x_]) x_all = [x_1, x_2,", "utf-8 -*- # @time : 2019/11/2 21:08 # @author : Mo # @function:", "else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql = int(rate * len_all) if len_ql", "else: x_all, y_ = np.array(x), np.array(y) cnt = 0 yield (x_all, y_) x,", "label_set.add(label_real) file_csv.close() return label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1): label_set,", "reverse=True)] return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l):", "preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1): label_set, len_all = self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了,", "== \"\" else label que_embed = embed.sentence2idx(ques) label_zeros = [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]]", "输入为csv格式, [label,ques] \"\"\" def __init__(self): self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l)", "# @function: from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index =", "range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)] return pred_l2i_rank", "= set() len_all = 0 file_csv = open(path, \"r\", encoding=\"utf-8\") for line in", "len_all def process_line(line): # 对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques = str(line_sp[1]).strip().upper() label =", "1 if len_all > 1: # 第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org = str(line_sp[0]).strip().upper()", "len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1): label_set, len_all = self.preprocess_get_label_set(path) #", "cout_all_line += 1 if cout_all_line > 1: # 第一条是标签'label,ques',不选择 x_line, y_line = process_line(line)", "prereocess_idx(self, pred): if 
os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l = self.l2i_i2l['i2l'] for i in", "len_ql = len_all def process_line(line): # 对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques = str(line_sp[1]).strip().upper()", "save_json from keras_textclassification.conf.path_config import path_model_dir path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir", "x in x_]) x_2 = np.array([x[1] for x in x_]) x_all = [x_1,", "= [x_1, x_2] elif embedding_type == 'xlnet': x_, y_ = x, np.array(y) x_1", "label_set: label2index[label_one] = count index2label[count] = label_one count = count + 1 l2i_i2l", "path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json' import numpy as np import", "= self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index", "# 第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real = \"NAN\" if label_org==\"\"", "line.split(\",\") ques = str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label = \"NAN\" if label ==", "str(line_sp[0]).strip().upper() label = \"NAN\" if label == \"\" else label que_embed = embed.sentence2idx(ques)", "i in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)]", "= label_one count = count + 1 l2i_i2l = {} l2i_i2l['l2i'] = label2index", "def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l = self.l2i_i2l['i2l'] for i", "reverse=True)] return pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类", "cnt == batch_size: if embedding_type in ['bert', 'albert']: x_, y_ = np.array(x), np.array(y)", "x, y = [], [] # 跳出循环 if len_ql < cout_all_line: break for", "[sorted(pred_l2i.items(), key=lambda k: k[1], reverse=True)] return pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def", "跳出循环 if len_ql < cout_all_line: break for line in file_csv: cout_all_line += 1", "if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i = self.l2i_i2l['l2i'] for i in range(len(pred)): pred_l2i[pred[i]]", "count = count + 1 l2i_i2l = {} l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] =", "pred_i2l = {} i2l = self.l2i_i2l['i2l'] for i in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i]", "break for line in file_csv: cout_all_line += 1 if cout_all_line > 1: #", "# 对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques = str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label =", "file_csv = open(path, \"r\", encoding=\"utf-8\") for line in file_csv: len_all += 1 if", "pred_l2i_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set =", "\"NAN\" if label == \"\" else label que_embed = embed.sentence2idx(ques) label_zeros = [0]", "= path_model_dir + 'l2i_i2l.json' import numpy as np import os class PreprocessGenerator: \"\"\"", "label_zeros = [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed, label_zeros while True:", "path_model_dir + 'l2i_i2l.json' import numpy as np import os class PreprocessGenerator: \"\"\" 数据预处理,", "y_ = np.array(x), np.array(y) cnt = 0 yield (x_all, y_) x, y 
=[],", "= 0 x, y = [], [] # 跳出循环 if len_ql < cout_all_line:", "label_org==\"\" else label_org label_set.add(label_real) file_csv.close() return label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path,", "获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index = {} index2label", "= line.split(\",\") ques = str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label = \"NAN\" if label", "+= 1 if len_all > 1: # 第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org =", "= {} l2i = self.l2i_i2l['l2i'] for i in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank", "len_all = 0 file_csv = open(path, \"r\", encoding=\"utf-8\") for line in file_csv: len_all", "file_csv: cout_all_line += 1 if cout_all_line > 1: # 第一条是标签'label,ques',不选择 x_line, y_line =", "return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i", "embedding_type, batch_size, path, embed, rate=1): label_set, len_all = self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用", "batch_size: if embedding_type in ['bert', 'albert']: x_, y_ = np.array(x), np.array(y) x_1 =", "open(path, \"r\", encoding=\"utf-8\") cout_all_line = 0 cnt = 0 x, y = [],", "x_2] elif embedding_type == 'xlnet': x_, y_ = x, np.array(y) x_1 = np.array([x[0][0]", "k[1], reverse=True)] return pred_i2l_rank else: raise RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self, pred): if", "* len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed, label_zeros while True: file_csv = open(path,", "range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)] return pred_i2l_rank", "len_ql <= 500: # sample时候不生效,使得语料足够训练 len_ql = len_all def process_line(line): # 对每一条数据操作,获取label和问句index line_sp", "count index2label[count] = label_one count = count + 1 l2i_i2l = {} l2i_i2l['l2i']", "index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql = int(rate *", "x_2 = np.array([x[1] for x in x_]) x_all = [x_1, x_2] elif embedding_type", "# sample时候不生效,使得语料足够训练 len_ql = len_all def process_line(line): # 对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques", "数据预处理, 输入为csv格式, [label,ques] \"\"\" def __init__(self): self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l =", "= label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例", "ques = str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label = \"NAN\" if label == \"\"", "如果label2index存在则不转换了, dev验证集合的时候用 if not os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index = {} index2label =", "in label_set: label2index[label_one] = count index2label[count] = label_one count = count + 1", "[x_1, x_2] elif embedding_type == 'xlnet': x_, y_ = x, np.array(y) x_1 =", "= path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json' import numpy as np", "for x in x_]) x_all = [x_1, x_2, x_3] else: x_all, y_ =", "return que_embed, label_zeros while True: file_csv = open(path, \"r\", encoding=\"utf-8\") cout_all_line = 0", "in file_csv: len_all += 1 if 
len_all > 1: # 第一条是标签'label,ques',不选择 line_sp =", "l2i = self.l2i_i2l['l2i'] for i in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(),", "[], [] # 跳出循环 if len_ql < cout_all_line: break for line in file_csv:", "path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json' import numpy as np import os class PreprocessGenerator:", "'l2i_i2l.json' import numpy as np import os class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques]", "import path_model_dir path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json' import", "= [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed, label_zeros while True: file_csv", ": Mo # @function: from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json from keras_textclassification.conf.path_config import path_model_dir", "pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l = self.l2i_i2l['i2l'] for i in range(len(pred)):", "path_fast_text_model_vocab2index = path_model_dir + 'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json' import numpy as", "x, np.array(y) x_1 = np.array([x[0][0] for x in x_]) x_2 = np.array([x[1][0] for", "os class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\" def __init__(self): self.l2i_i2l = None", "<= 500: # sample时候不生效,使得语料足够训练 len_ql = len_all def process_line(line): # 对每一条数据操作,获取label和问句index line_sp =", "= np.array([x[0] for x in x_]) x_2 = np.array([x[1] for x in x_])", "1: # 第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real = \"NAN\" if", "= [x_1, x_2, x_3] else: x_all, y_ = np.array(x), np.array(y) cnt = 0", "encoding=\"utf-8\") cout_all_line = 0 cnt = 0 x, y = [], [] #", "x_line, y_line = process_line(line) x.append(x_line) y.append(y_line) cnt += 1 if cnt == batch_size:", "coding: utf-8 -*- # @time : 2019/11/2 21:08 # @author : Mo #", "label que_embed = embed.sentence2idx(ques) label_zeros = [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return", "+ 1 l2i_i2l = {} l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l)", "= load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql = int(rate * len_all) if len_ql <= 500:", "len_ql < cout_all_line: break for line in file_csv: cout_all_line += 1 if cout_all_line", "str(line_sp[0]).strip().upper() label_real = \"NAN\" if label_org==\"\" else label_org label_set.add(label_real) file_csv.close() return label_set, len_all", "len_all > 1: # 第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real =", "> 1: # 第一条是标签'label,ques',不选择 x_line, y_line = process_line(line) x.append(x_line) y.append(y_line) cnt += 1", "as np import os class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\" def __init__(self):", "if len_ql <= 500: # sample时候不生效,使得语料足够训练 len_ql = len_all def process_line(line): # 对每一条数据操作,获取label和问句index", "np.array(x), np.array(y) cnt = 0 yield (x_all, y_) x, y =[], [] file_csv.close()", "# 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index = {}", "x_]) x_all = [x_1, x_2, x_3] else: x_all, y_ = np.array(x), np.array(y) cnt", "embed, rate=1): label_set, len_all = self.preprocess_get_label_set(path) # 获取label转index字典等, 如果label2index存在则不转换了, dev验证集合的时候用 if not 
os.path.exists(path_fast_text_model_l2i_i2l):", "label_org label_set.add(label_real) file_csv.close() return label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1):", "if not os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index = {} index2label = {} for", "> 1: # 第一条是标签'label,ques',不选择 line_sp = line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real = \"NAN\"", "label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1): label_set, len_all = self.preprocess_get_label_set(path)", "index2label = {} for label_one in label_set: label2index[label_one] = count index2label[count] = label_one", "path): # 首先获取label,set,即存在的具体类 label_set = set() len_all = 0 file_csv = open(path, \"r\",", "'vocab2index.json' path_fast_text_model_l2i_i2l = path_model_dir + 'l2i_i2l.json' import numpy as np import os class", "x_2, x_3] else: x_all, y_ = np.array(x), np.array(y) cnt = 0 yield (x_all,", "= process_line(line) x.append(x_line) y.append(y_line) cnt += 1 if cnt == batch_size: if embedding_type", "\"r\", encoding=\"utf-8\") for line in file_csv: len_all += 1 if len_all > 1:", "= len_all def process_line(line): # 对每一条数据操作,获取label和问句index line_sp = line.split(\",\") ques = str(line_sp[1]).strip().upper() label", "None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l =", "\"r\", encoding=\"utf-8\") cout_all_line = 0 cnt = 0 x, y = [], []", "+= 1 if cout_all_line > 1: # 第一条是标签'label,ques',不选择 x_line, y_line = process_line(line) x.append(x_line)", "save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l = load_json(path_fast_text_model_l2i_i2l) # 读取数据的比例 len_ql = int(rate * len_all)", "prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i = self.l2i_i2l['l2i'] for i in", "2019/11/2 21:08 # @author : Mo # @function: from keras_textclassification.data_preprocess.text_preprocess import load_json, save_json", "== 'xlnet': x_, y_ = x, np.array(y) x_1 = np.array([x[0][0] for x in", "x_all, y_ = np.array(x), np.array(y) cnt = 0 yield (x_all, y_) x, y", "self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l):", "1 l2i_i2l = {} l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else:", "len_ql = int(rate * len_all) if len_ql <= 500: # sample时候不生效,使得语料足够训练 len_ql =", "os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i = self.l2i_i2l['l2i'] for i in range(len(pred)): pred_l2i[pred[i]] =", "cnt = 0 yield (x_all, y_) x, y =[], [] file_csv.close() print(\"preprocess_label_ques_to_idx ok\")", "RuntimeError(\"path_fast_text_model_label2index is None\") def preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set = set() len_all =", "= line.split(\",\") label_org = str(line_sp[0]).strip().upper() label_real = \"NAN\" if label_org==\"\" else label_org label_set.add(label_real)", "\"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\" def __init__(self): self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l", "file_csv.close() return label_set, len_all def preprocess_label_ques_to_idx(self, embedding_type, batch_size, path, embed, rate=1): label_set, len_all", 
"raise RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i", "x_, y_ = x, np.array(y) x_1 = np.array([x[0][0] for x in x_]) x_2", "-*- # @time : 2019/11/2 21:08 # @author : Mo # @function: from", "= {} l2i_i2l['l2i'] = label2index l2i_i2l['i2l'] = index2label save_json(l2i_i2l, path_fast_text_model_l2i_i2l) else: l2i_i2l =", "if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l = self.l2i_i2l['i2l'] for i in range(len(pred)): pred_i2l[i2l[str(i)]]", "np.array(y) x_1 = np.array([x[0][0] for x in x_]) x_2 = np.array([x[1][0] for x", "import os class PreprocessGenerator: \"\"\" 数据预处理, 输入为csv格式, [label,ques] \"\"\" def __init__(self): self.l2i_i2l =", "count = 0 label2index = {} index2label = {} for label_one in label_set:", "preprocess_get_label_set(self, path): # 首先获取label,set,即存在的具体类 label_set = set() len_all = 0 file_csv = open(path,", "label2index[label_one] = count index2label[count] = label_one count = count + 1 l2i_i2l =", "def __init__(self): self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred):", "for i in range(len(pred)): pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1],", "pred_i2l[i2l[str(i)]] = pred[i] pred_i2l_rank = [sorted(pred_i2l.items(), key=lambda k: k[1], reverse=True)] return pred_i2l_rank else:", "\"\" else label que_embed = embed.sentence2idx(ques) label_zeros = [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] =", "int(rate * len_all) if len_ql <= 500: # sample时候不生效,使得语料足够训练 len_ql = len_all def", "embed.sentence2idx(ques) label_zeros = [0] * len(l2i_i2l['l2i']) label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed, label_zeros while", "in x_]) x_2 = np.array([x[1][0] for x in x_]) x_3 = np.array([x[2][0] for", "__init__(self): self.l2i_i2l = None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if", "= None if os.path.exists(path_fast_text_model_l2i_i2l): self.l2i_i2l = load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l", "label_zeros[l2i_i2l['l2i'][label]] = 1 return que_embed, label_zeros while True: file_csv = open(path, \"r\", encoding=\"utf-8\")", "[x_1, x_2, x_3] else: x_all, y_ = np.array(x), np.array(y) cnt = 0 yield", "y_ = x, np.array(y) x_1 = np.array([x[0][0] for x in x_]) x_2 =", "= self.l2i_i2l['l2i'] for i in range(len(pred)): pred_l2i[pred[i]] = l2i[pred[i]] pred_l2i_rank = [sorted(pred_l2i.items(), key=lambda", "not os.path.exists(path_fast_text_model_l2i_i2l): count = 0 label2index = {} index2label = {} for label_one", ": 2019/11/2 21:08 # @author : Mo # @function: from keras_textclassification.data_preprocess.text_preprocess import load_json,", "len_all) if len_ql <= 500: # sample时候不生效,使得语料足够训练 len_ql = len_all def process_line(line): #", "file_csv: len_all += 1 if len_all > 1: # 第一条是标签'label,ques',不选择 line_sp = line.split(\",\")", "x in x_]) x_3 = np.array([x[2][0] for x in x_]) x_all = [x_1,", "for x in x_]) x_2 = np.array([x[1][0] for x in x_]) x_3 =", "0 x, y = [], [] # 跳出循环 if len_ql < cout_all_line: break", "load_json(path_fast_text_model_l2i_i2l) def prereocess_idx(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_i2l = {} i2l = self.l2i_i2l['i2l'] for", "< cout_all_line: break for 
line in file_csv: cout_all_line += 1 if cout_all_line >", "label == \"\" else label que_embed = embed.sentence2idx(ques) label_zeros = [0] * len(l2i_i2l['l2i'])", "RuntimeError(\"path_fast_text_model_label2index is None\") def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i =", "x_1 = np.array([x[0][0] for x in x_]) x_2 = np.array([x[1][0] for x in", "* len_all) if len_ql <= 500: # sample时候不生效,使得语料足够训练 len_ql = len_all def process_line(line):", "def prereocess_pred_xid(self, pred): if os.path.exists(path_fast_text_model_l2i_i2l): pred_l2i = {} l2i = self.l2i_i2l['l2i'] for i", "line_sp = line.split(\",\") ques = str(line_sp[1]).strip().upper() label = str(line_sp[0]).strip().upper() label = \"NAN\" if", "in ['bert', 'albert']: x_, y_ = np.array(x), np.array(y) x_1 = np.array([x[0] for x" ]
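# --- Illustration (added; not part of the library) ----------------------------
# A self-contained sketch of what preprocess_label_ques_to_idx() does for each
# csv row: map every label to an index and build a one-hot target vector.  The
# tiny label set and the fake sentence2idx() below are invented for the example.
def _demo_one_hot_encoding():
    import numpy as np
    label_set = {"GREETING", "WEATHER", "NAN"}
    l2i = {label: i for i, label in enumerate(sorted(label_set))}

    def sentence2idx(ques):  # stand-in for the real embedding lookup
        return [ord(ch) % 1000 for ch in ques]

    label, ques = "WEATHER", "WHAT IS THE WEATHER LIKE TODAY"
    label_zeros = [0] * len(l2i)
    label_zeros[l2i[label]] = 1  # one-hot target, same idea as process_line() above
    return np.array(sentence2idx(ques)), np.array(label_zeros)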
[ "self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'],", "Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate reference images for Android", "documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd',", "source code is governed by a BSD-style license that can be # found", "of this source code is governed by a BSD-style license that can be", "AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing is", "bug=716564) # Flaky for unknown reasons only on macOS. Not planning to investigate", "reasons only on macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461)", "Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems to", "to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) #", "on the new AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) #", "GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): #", "2014 The Chromium Authors. All rights reserved. # Use of this source code", "supported on Android; so we skip these tests # that disables gpu compositing", "timing out on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'],", "compositing is not supported on Android; so we skip these tests # that", "Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer',", "['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277)", "0x6613)], bug=653538) # Software compositing is not supported on Android; so we skip", "gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android'])", "investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab):", "these tests # that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker',", "license that can be # found in the LICENSE file. 
from gpu_tests.gpu_test_expectations import", "# Seems to be flaky on the new AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox',", "['win', ('amd', 0x6613)], bug=653538) # Software compositing is not supported on Android; so", "self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron)", "this source code is governed by a BSD-style license that can be #", "self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate reference images for Android devices", "so we skip these tests # that disables gpu compositing on Android platforms.", "reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after", "BSD-style license that can be # found in the LICENSE file. from gpu_tests.gpu_test_expectations", "class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia',", "'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown reasons only on", "Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)],", "Seems to be flaky on the new AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win',", "flakily timing out on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android',", "['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these on Mac Retina", "# ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems to be flaky on the", "['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix", "['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily timing out on this configuration. self.Flaky('*',", "on macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10',", "compositing on Android platforms. 
self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D',", "fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate", "['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check / generate reference", "bug=690663) # TODO(zakerinasab): check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux',", "['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown reasons", "bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects',", "self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing is not supported on Android;", "R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing is not", "bug=653538) # Software compositing is not supported on Android; so we skip these", "'linux', 'win', 'android'], bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays',", "these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325) self.Fail('Pixel_Video_MP4', bug=615325)", "devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia',", "tests # that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android'])", "'amd', ('nvidia', 0x1234)], bug=123) # Seems to be flaky on the new AMD", "images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win',", "PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)],", "unknown reasons only on macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'],", "['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily timing", "bug=123) # Seems to be flaky on the new AMD R7 240 drivers.", "'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171) # TODO(junov): update reference images", "TODO(zakerinasab): check / generate reference images. 
self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'],", "after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325) self.Fail('Pixel_Video_MP4', bug=615325) self.Fail('Pixel_Video_VP9',", "# TODO(kbr): flakily timing out on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369)", "SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) #", "# Flaky for unknown reasons only on macOS. Not planning to investigate #", "Android; so we skip these tests # that disables gpu compositing on Android", "Flaky for unknown reasons only on macOS. Not planning to investigate # further.", "check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228)", "# TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang):", "Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate reference images for", "self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems to be flaky on", "TODO(dshwang): remove these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325)", "self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after new reference", "disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker',", "self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690)", "update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these", "# that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing',", "not supported on Android; so we skip these tests # that disables gpu", "self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily timing out on this configuration.", "Authors. All rights reserved. # Use of this source code is governed by", "240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing is not supported", "generate reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia',", "further. 
self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check /", "The Chromium Authors. All rights reserved. # Use of this source code is", "Use of this source code is governed by a BSD-style license that can", "# Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems", "TODO(kbr): flakily timing out on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4',", "['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) #", "'win', 'android'], bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'],", "import GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self):", "/ generate reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac',", "# TODO(zakerinasab): check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win',", "for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects',", "only on macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker',", "('amd', 0x6613)], bug=653538) # Software compositing is not supported on Android; so we", "new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325) self.Fail('Pixel_Video_MP4', bug=615325) self.Fail('Pixel_Video_VP9', bug=615325)", "['android', 'nvidia'], bug=716564) # Flaky for unknown reasons only on macOS. Not planning", "on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate reference images", "out on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564)", "/ generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize',", "['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these on", "to be flaky on the new AMD R7 240 drivers. 
self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd',", "# TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check", "reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)],", "planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663)", "this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky", "Software compositing is not supported on Android; so we skip these tests #", "0x1234)], bug=123) # Seems to be flaky on the new AMD R7 240", "self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'],", "code is governed by a BSD-style license that can be # found in", "file. from gpu_tests.gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class", "self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily", "that can be # found in the LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations", "# further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check", "# TODO(dshwang): remove these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9',", "'android'], bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727)", "rights reserved. # Use of this source code is governed by a BSD-style", "on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android'])", "# See the GpuTestExpectations class for documentation. 
class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample", "self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these", "self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox',", "('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily timing out", "['mac', 'linux', 'win', 'android'], bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727)", "bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after new reference images are", "('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily timing out on this configuration. self.Flaky('*', ['linux',", "we skip these tests # that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D',", "bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown reasons only on macOS.", "['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'],", "self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588) # TODO(ccameron) fix these on Mac", "drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing is not supported on", "gpu_tests.gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def", "TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check /", "configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for", "['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171) # TODO(junov):", "def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123)", "bug=521588) # TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura)", "All rights reserved. # Use of this source code is governed by a", "by a BSD-style license that can be # found in the LICENSE file.", "'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) #", "# found in the LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations # See the", "self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check / generate reference images. 
self.Fail('Pixel_Canvas2DUntagged',", "['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632)", "bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr):", "('nvidia', 0x1234)], bug=123) # Seems to be flaky on the new AMD R7", "class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', #", "# TODO(vmiura) check / generate reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'],", "('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize',", "is governed by a BSD-style license that can be # found in the", "a BSD-style license that can be # found in the LICENSE file. from", "# self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems to be flaky", "GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox',", "self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown reasons only on macOS. Not", "check / generate reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker',", "'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171) # TODO(junov): update reference", "governed by a BSD-style license that can be # found in the LICENSE", "['mac'], bug=721727) # TODO(dshwang): remove these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4',", "self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)],", "bug=721727) # TODO(dshwang): remove these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325)", "from gpu_tests.gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations):", "See the GpuTestExpectations class for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage:", "# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this", "'nvidia'], bug=716564) # Flaky for unknown reasons only on macOS. Not planning to", "# Software compositing is not supported on Android; so we skip these tests", "reserved. # Use of this source code is governed by a BSD-style license", "for documentation. class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac',", "# Use of this source code is governed by a BSD-style license that", "on Android; so we skip these tests # that disables gpu compositing on", "the GpuTestExpectations class for documentation. 
class PixelExpectations(GpuTestExpectations): def SetExpectations(self): # Sample Usage: #", "bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check / generate reference images.", "found in the LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations", "flaky on the new AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538)", "for unknown reasons only on macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer',", "bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171)", "'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171) # TODO(junov): update", "the LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for", "0xfe9)], bug=690277) # TODO(kbr): flakily timing out on this configuration. self.Flaky('*', ['linux', 'intel',", "that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android'])", "Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac',", "remove these after new reference images are generated. self.Fail('Pixel_DirectComposition_Video_MP4', bug=615325) self.Fail('Pixel_DirectComposition_Video_VP9', bug=615325) self.Fail('Pixel_Video_MP4',", "is not supported on Android; so we skip these tests # that disables", "['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after new reference images", "images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256) self.Fail('Pixel_OffscreenCanvasUnaccelerated2DGPUCompositingWorker', ['mac', ('nvidia', 0xfe9)], bug=706016)", "Chromium Authors. All rights reserved. # Use of this source code is governed", "can be # found in the LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations #", "bug=735171) # TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) #", "Usage: # self.Fail('Pixel_Canvas2DRedBox', # ['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems to be", "be # found in the LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations # See", "skip these tests # that disables gpu compositing on Android platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android'])", "['android'], bug=521588) # TODO(ccameron) fix these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) #", "self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown", "in the LICENSE file. 
from gpu_tests.gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class", "TODO(junov): update reference images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove", "self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after new reference images are generated.", "bug=533690) # TODO(vmiura) check / generate reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac',", "macOS. Not planning to investigate # further. self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel',", "generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac',", "images self.Fail('Pixel_CSSFilterEffects', ['mac'], bug=721727) self.Fail('Pixel_CSSFilterEffects_NoOverlays', ['mac'], bug=721727) # TODO(dshwang): remove these after new", "['mac', 'amd', ('nvidia', 0x1234)], bug=123) # Seems to be flaky on the new", "0xfe9)], bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily timing out on", "['mac'], bug=533690) # TODO(vmiura) check / generate reference images for Android devices self.Fail('Pixel_SolidColorBackground',", "self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux', 'win', 'android'], bug=735171) #", "on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) #", "self.Flaky('Pixel_ScissorTestWithPreserveDrawingBuffer', ['mac'], bug=660461) self.Flaky('Pixel_OffscreenCanvas2DResizeOnWorker', ['win10', ('intel', 0x1912)], bug=690663) # TODO(zakerinasab): check / generate", "0x1912)], bug=690663) # TODO(zakerinasab): check / generate reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac',", "bug=706016) self.Fail('Pixel_CSSFilterEffects', ['mac', ('nvidia', 0xfe9)], bug=690277) # TODO(kbr): flakily timing out on this", "self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'], bug=521588)", "new AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software compositing", "reference images. self.Fail('Pixel_Canvas2DUntagged', bug=713632) self.Flaky('Pixel_OffscreenCanvasTransferBeforeStyleResize', ['mac', 'linux', 'win', 'android'], bug=735228) self.Flaky('Pixel_OffscreenCanvasTransferAfterStyleResize', ['mac', 'linux',", "Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source", "bug=690277) # TODO(kbr): flakily timing out on this configuration. self.Flaky('*', ['linux', 'intel', 'debug'],", "the new AMD R7 240 drivers. 
self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)], bug=653538) # Software", "platforms. self.Skip('Pixel_OffscreenCanvasUnaccelerated2D', ['android']) self.Skip('Pixel_OffscreenCanvasUnaccelerated2DWorker', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositing', ['android']) self.Skip('Pixel_OffscreenCanvasWebGLSoftwareCompositingWorker', ['android']) self.Skip('Pixel_CanvasDisplayLinearRGBUnaccelerated2D', ['android']) self.Fail('Pixel_ScissorTestWithPreserveDrawingBuffer', ['android'],", "'intel', 'debug'], bug=648369) self.Flaky('Pixel_Video_MP4', ['android', 'nvidia'], bug=716564) # Flaky for unknown reasons only", "TODO(vmiura) check / generate reference images for Android devices self.Fail('Pixel_SolidColorBackground', ['mac', 'android'], bug=624256)", "these on Mac Retina self.Fail('Pixel_CSS3DBlueBox', ['mac'], bug=533690) # TODO(vmiura) check / generate reference", "be flaky on the new AMD R7 240 drivers. self.Flaky('Pixel_GpuRasterization_BlueBox', ['win', ('amd', 0x6613)],", "LICENSE file. from gpu_tests.gpu_test_expectations import GpuTestExpectations # See the GpuTestExpectations class for documentation." ]
[ "django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations =", "primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)),", "max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)),", "by Django 1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals from django.conf import", "on 2018-06-08 22:54 from __future__ import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb", "__future__ import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import migrations,", "models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,", "settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial", "class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [", "], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'), ), ]", "to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False,", "models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED',", "import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models", "True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id',", "('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')),", "name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)),", "-*- 
coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-06-08 22:54 from", "] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year',", "django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING',", "('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2,", "('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number',", "on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={", "# -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-06-08 22:54", "22:54 from __future__ import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db", "models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload',", "unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models import", "migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL),", "models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ],", "migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'), ),", "Generated by Django 1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals from django.conf", "('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True,", "('row_number', models.IntegerField()), ], ), 
migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at',", "dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True,", "('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'),", "models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at',", "('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source',", "models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()),", "('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[", "max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),", "('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status',", "django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion class", "django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True", "to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload',", "'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,", "initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ 
migrations.CreateModel( name='BudgetItem',", "models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE,", "('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'),", "from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies", "= True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[", "('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')],", "fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category',", "models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)),", "models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING',", "import django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial =", "[ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False,", "('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces',", "serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results',", "('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'),", "migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), 
('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata',", "models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw',", "models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)),", "verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)),", "import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [", "operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()),", "serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)),", "import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations", "'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)),", "models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ),", "django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies =", "primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2,", "import settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration):", "models import django.db.models.deletion class Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ]", "('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ],", "max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True,", 
"default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+',", "Migration(migrations.Migration): initial = True dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel(", "'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True,", "], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at',", "verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent',", "'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by',", "name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()),", "('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)),", "<filename>examples/p02_budgets/budget_data_ingest/migrations/0001_initial.py # -*- coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-06-08", "Django 1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals from django.conf import settings", "coding: utf-8 -*- # Generated by Django 1.11.13 on 2018-06-08 22:54 from __future__", "models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED',", "related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract':", "('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, },", "# Generated by Django 1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals from", "from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import migrations, models import django.db.models.deletion", "utf-8 -*- # Generated by Django 1.11.13 on 2018-06-08 22:54 from __future__ import", "related_name='+', 
to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem',", "('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter',", "2018-06-08 22:54 from __future__ import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from", "-*- # Generated by Django 1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals", "on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField(", "models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ), migrations.AddField( model_name='budgetitem', name='upload', field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='budget_data_ingest.Upload'),", "'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by',", "('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED', 'Staged'),", "= [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency',", "('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()),", "models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()), ('data_source', models.TextField()), ('category', models.TextField()), ('dollars_budgeted',", "('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True,", "), migrations.CreateModel( name='Upload', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)),", "('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='replaced_by', to='budget_data_ingest.Upload')), ('status_changed_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
to=settings.AUTH_USER_MODEL)),", "from __future__ import unicode_literals from django.conf import settings import django.contrib.postgres.fields.jsonb from django.db import", "('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')),", "= [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ] operations = [ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True,", "fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('created_at', models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file',", "1.11.13 on 2018-06-08 22:54 from __future__ import unicode_literals from django.conf import settings import", "django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING', 'Loading'), ('PENDING', 'Pending'), ('STAGED',", "('STAGED', 'Staged'), ('INSERTED', 'Inserted'), ('DELETED', 'Deleted')], default='LOADING', max_length=10)), ('status_changed_at', models.DateTimeField(null=True)), ('replaces', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE,", "[ migrations.CreateModel( name='BudgetItem', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('year', models.IntegerField()), ('agency', models.TextField()),", "models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='+', to=settings.AUTH_USER_MODEL)), ('submitter', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)), ], options={ 'abstract': False, }, ),", "models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel( name='Upload', fields=[ ('id',", "models.DateTimeField(auto_now_add=True)), ('updated_at', models.DateTimeField(auto_now=True)), ('file_metadata', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('file', models.FileField(upload_to='')), ('raw', models.BinaryField(null=True)), ('validation_results', django.contrib.postgres.fields.jsonb.JSONField(null=True)), ('status', models.CharField(choices=[('LOADING',", "('category', models.TextField()), ('dollars_budgeted', models.DecimalField(decimal_places=2, max_digits=14)), ('dollars_spent', models.DecimalField(decimal_places=2, max_digits=14)), ('row_number', models.IntegerField()), ], ), migrations.CreateModel(" ]
[ "import setuptools #enables develop setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy implementation of", "implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(),", "on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy', 'sklearn'", "based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy',", ": A NumPy implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT", "SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖", "description='PySVM : A NumPy implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'],", "setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy implementation of SVM based on SMO", "SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy', 'sklearn' ],", "#enables develop setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy implementation of SVM based", "name='pysvm', version='0.1', description='PySVM : A NumPy implementation of SVM based on SMO algorithm',", "algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy', 'sklearn' ], url='https://github.com/Kaslanarian/PySVM',", "A NumPy implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License',", "author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[ #自动安装依赖 'numpy', 'sklearn' ], url='https://github.com/Kaslanarian/PySVM', )", "version='0.1', description='PySVM : A NumPy implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\",", "of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md', encoding='utf-8').read(), install_requires=[", "setuptools #enables develop setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy implementation of SVM", "develop setuptools.setup( name='pysvm', version='0.1', description='PySVM : A NumPy implementation of SVM based on", "NumPy implementation of SVM based on SMO algorithm', author_email=\"<EMAIL>\", packages=['pysvm'], license='MIT License', long_description=open('README.md'," ]
######## Image Object Detection Using Tensorflow-trained Classifier #########
#
# Author: <NAME>
# Date: 1/15/18
# Description:
# This program uses a TensorFlow-trained classifier to perform object detection.
# It loads the classifier and uses it to perform object detection on an image.
# It draws boxes and scores around the objects of interest in the image.

## Some of the code is copied from Google's example at
## https://github.com/tensorflow/models/blob/master/research/object_detection/object_detection_tutorial.ipynb
## and some is copied from Dat Tran's example at
## https://github.com/datitran/object_detector_app/blob/master/object_detection_app.py
## but I changed it to make it more understandable to me.

# Import packages
import os
import cv2
import numpy as np
import tensorflow as tf
import sys

# This is needed since the notebook is stored in the object_detection folder.
sys.path.append("..")

# Import utilities
from utils import label_map_util
from utils import visualization_utils as vis_util

# Name of the directory containing the object detection module we're using
MODEL_NAME = 'inference_graph'
IMAGE_NAME = 'test1.jpg'

# Grab path to current working directory
CWD_PATH = os.getcwd()

# Path to frozen detection graph .pb file, which contains the model that is used
# for object detection.
PATH_TO_CKPT = os.path.join(CWD_PATH, MODEL_NAME, 'frozen_inference_graph.pb')

# Path to label map file
PATH_TO_LABELS = os.path.join(CWD_PATH, 'training', 'labelmap.pbtxt')

# Path to image
PATH_TO_IMAGE = os.path.join(CWD_PATH, IMAGE_NAME)

# Number of classes the object detector can identify
NUM_CLASSES = 6

# Load the label map.
# Label maps map indices to category names, so that when our convolution
# network predicts `5`, we know that this corresponds to `king`.
# Here we use internal utility functions, but anything that returns a
# dictionary mapping integers to appropriate string labels would be fine.
label_map = label_map_util.load_labelmap(PATH_TO_LABELS)
categories = label_map_util.convert_label_map_to_categories(label_map, max_num_classes=NUM_CLASSES, use_display_name=True)
category_index = label_map_util.create_category_index(categories)

# Load the Tensorflow model into memory.
detection_graph = tf.Graph()
with detection_graph.as_default():
    od_graph_def = tf.GraphDef()
    with tf.gfile.GFile(PATH_TO_CKPT, 'rb') as fid:
        serialized_graph = fid.read()
        od_graph_def.ParseFromString(serialized_graph)
        tf.import_graph_def(od_graph_def, name='')

    sess = tf.Session(graph=detection_graph)

# Define input and output tensors (i.e. data) for the object detection classifier

# Input tensor is the image
image_tensor = detection_graph.get_tensor_by_name('image_tensor:0')

# Output tensors are the detection boxes, scores, and classes
# Each box represents a part of the image where a particular object was detected
detection_boxes = detection_graph.get_tensor_by_name('detection_boxes:0')

# Each score represents level of confidence for each of the objects.
# The score is shown on the result image, together with the class label.
detection_scores = detection_graph.get_tensor_by_name('detection_scores:0')
detection_classes = detection_graph.get_tensor_by_name('detection_classes:0')

# Number of objects detected
num_detections = detection_graph.get_tensor_by_name('num_detections:0')

# Load image using OpenCV and
# expand image dimensions to have shape: [1, None, None, 3]
# i.e. a single-column array, where each item in the column has the pixel RGB value
image = cv2.imread(PATH_TO_IMAGE)
image_expanded = np.expand_dims(image, axis=0)

# Perform the actual detection by running the model with the image as input
(boxes, scores, classes, num) = sess.run(
    [detection_boxes, detection_scores, detection_classes, num_detections],
    feed_dict={image_tensor: image_expanded})

# Draw the results of the detection (aka 'visualize the results')
vis_util.visualize_boxes_and_labels_on_image_array(
    image,
    np.squeeze(boxes),
    np.squeeze(classes).astype(np.int32),
    np.squeeze(scores),
    category_index,
    use_normalized_coordinates=True,
    line_thickness=8,
    min_score_thresh=0.60)

# All the results have been drawn on image. Now display the image.
cv2.imshow('Object detector', image)

# Press any key to close the image
cv2.waitKey(0)

# Clean up
cv2.destroyAllWindows()
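The single-image flow above can be wrapped in a small helper so the same session and tensors are reused across several files. This is only a sketch built from the calls already present in the script; the function name, threshold parameter, and example file list are made up for illustration.

# Hedged sketch: reuse the session/tensors defined above for several images.
# detect_and_show and the example file names are illustrative, not part of the original script.
def detect_and_show(image_path, threshold=0.60):
    image = cv2.imread(image_path)
    image_expanded = np.expand_dims(image, axis=0)
    # Run the detector exactly as above, just parameterised by path.
    (boxes, scores, classes, num) = sess.run(
        [detection_boxes, detection_scores, detection_classes, num_detections],
        feed_dict={image_tensor: image_expanded})
    vis_util.visualize_boxes_and_labels_on_image_array(
        image,
        np.squeeze(boxes),
        np.squeeze(classes).astype(np.int32),
        np.squeeze(scores),
        category_index,
        use_normalized_coordinates=True,
        line_thickness=8,
        min_score_thresh=threshold)
    cv2.imshow('Object detector', image)
    cv2.waitKey(0)


for name in ['test1.jpg', 'test2.jpg']:  # example file names (assumed)
    detect_and_show(os.path.join(CWD_PATH, name))
cv2.destroyAllWindows()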
from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter


class Command(BaseXpressDemocracyClubCsvImporter):
    council_id = 'E06000027'
    addresses_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv'
    stations_name = 'parl.2017-06-08/Version 1/Torbay Democracy_Club__08June2017.tsv'
    elections = ['parl.2017-06-08']
    csv_delimiter = '\t'
[ "def get(self, request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class", "def search(request): query = request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query))", "import Q from django.shortcuts import render from django.http import Http404 # Create your", ".models import Product, Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self,", "if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data)", "import api_view from .models import Product, Category from .serializers import ProductSerializer, CategorySerializer class", "import Product, Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request,", "except Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format= None): category = self.get_object(category_slug)", "query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data) else:", "Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data) else: return Response({\"products\": []})", "Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format= None): category = self.get_object(category_slug) serializer", "format= None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView):", "product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class", "'') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return", "= request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products,", "return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise", "import Response from rest_framework.decorators import api_view from .models import Product, Category from .serializers", "Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug, format= None): product", "request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True)", "self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try:", "your views here. from rest_framework.views import APIView from rest_framework.response import Response from rest_framework.decorators", "django.http import Http404 # Create your views here. 
from rest_framework.views import APIView from", "None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query", "Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try:", "= CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '') if query:", "= Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug):", "raise Http404 def get(self, request, category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug)", "search(request): query = request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer", "category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request, category_slug,", "products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug,", "# Create your views here. from rest_framework.views import APIView from rest_framework.response import Response", "= ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except", "request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def", "render from django.http import Http404 # Create your views here. 
from rest_framework.views import", "serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug)", "return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format= None): category", "ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except", "CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self,", "product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return", "Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404", "@api_view(['POST']) def search(request): query = request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) |", "class LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return", "from rest_framework.decorators import api_view from .models import Product, Category from .serializers import ProductSerializer,", "get(self, request, category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product)", "from django.db.models import Q from django.shortcuts import render from django.http import Http404 #", "= self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug):", "django.shortcuts import render from django.http import Http404 # Create your views here. from", "Http404 # Create your views here. 
from rest_framework.views import APIView from rest_framework.response import", "ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def", "format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self,", "Http404 def get(self, request, category_slug, format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category)", "from rest_framework.views import APIView from rest_framework.response import Response from rest_framework.decorators import api_view from", "get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request, category_slug,", "def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request,", "format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request):", "from django.http import Http404 # Create your views here. from rest_framework.views import APIView", "Q from django.shortcuts import render from django.http import Http404 # Create your views", "request, category_slug, format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST'])", "serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '') if", "None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def", "views here. 
from rest_framework.views import APIView from rest_framework.response import Response from rest_framework.decorators import", "api_view from .models import Product, Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView):", "category_slug, format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def", "Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format= None): category =", "django.db.models import Q from django.shortcuts import render from django.http import Http404 # Create", "CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '') if query: products", "return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug, format= None):", "ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist:", "CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True)", "= ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug)", "ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4] serializer =", "Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query)", "Response from rest_framework.decorators import api_view from .models import Product, Category from .serializers import", "category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format=", "here. from rest_framework.views import APIView from rest_framework.response import Response from rest_framework.decorators import api_view", "APIView from rest_framework.response import Response from rest_framework.decorators import api_view from .models import Product,", "import render from django.http import Http404 # Create your views here. 
from rest_framework.views", "product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug,", "serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return", "request, category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return", "try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug, format=", "Product, Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None):", "from .models import Product, Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def", "Category from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products", "return Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist:", "def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self,", "def get(self, request, category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer =", "import Http404 # Create your views here. 
from rest_framework.views import APIView from rest_framework.response", "Response(serializer.data) class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise", "Http404 def get(self, request, category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer", "from django.shortcuts import render from django.http import Http404 # Create your views here.", "def get(self, request, category_slug, format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return", "LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data)", "from rest_framework.response import Response from rest_framework.decorators import api_view from .models import Product, Category", "get(self, request, format=None): products = Product.objects.all()[0:4] serializer = ProductSerializer(products,many=True) return Response(serializer.data) class ProductDetail(APIView):", "category_slug, product_slug, format= None): product = self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data)", "= Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data) else: return Response({\"products\":", "products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer = ProductSerializer(products, many=True) return Response(serializer.data) else: return", "get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404 def get(self, request,", "rest_framework.decorators import api_view from .models import Product, Category from .serializers import ProductSerializer, CategorySerializer", ".serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4]", "import APIView from rest_framework.response import Response from rest_framework.decorators import api_view from .models import", "category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query =", "get(self, request, category_slug, format= None): category = self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data)", "raise Http404 def get(self, request, category_slug, format= None): category = self.get_object(category_slug) serializer =", "Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug, format= None): product = self.get_object(category_slug,", "rest_framework.views import APIView from rest_framework.response import Response from rest_framework.decorators import api_view from .models", "= self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query',", "query = request.data.get('query', '') if query: products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query)) serializer =", "product = 
self.get_object(category_slug, product_slug) serializer = ProductSerializer(product) return Response(serializer.data) class CategoryDetail(APIView): def get_object(self,", "from .serializers import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products =", "class ProductDetail(APIView): def get_object(self, category_slug, product_slug): try: return Product.objects.filter(category__slug=category_slug).get(slug=product_slug) except Product.DoesNotExist: raise Http404", "class CategoryDetail(APIView): def get_object(self, category_slug): try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def", "try: return Category.objects.get(slug=category_slug) except Category.DoesNotExist: raise Http404 def get(self, request, category_slug, format= None):", "rest_framework.response import Response from rest_framework.decorators import api_view from .models import Product, Category from", "except Product.DoesNotExist: raise Http404 def get(self, request, category_slug, product_slug, format= None): product =", "import ProductSerializer, CategorySerializer class LatestProductsList(APIView): def get(self, request, format=None): products = Product.objects.all()[0:4] serializer", "return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '') if query: products =", "self.get_object(category_slug) serializer = CategorySerializer(category) return Response(serializer.data) @api_view(['POST']) def search(request): query = request.data.get('query', '')", "Create your views here. from rest_framework.views import APIView from rest_framework.response import Response from" ]
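For context, views like these are normally routed in a urls.py. The sketch below is an illustrative assumption: the app module name (product) and the URL patterns are invented; only the view names come from the code above.

# urls.py (illustrative sketch, not part of the recovered code)
from django.urls import path

from product import views

urlpatterns = [
    path('latest-products/', views.LatestProductsList.as_view()),
    path('products/search/', views.search),
    path('products/<slug:category_slug>/<slug:product_slug>/', views.ProductDetail.as_view()),
    path('products/<slug:category_slug>/', views.CategoryDetail.as_view()),
]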
[ "aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None): self.fname = fname self.mname =", "work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None,", "self.nick = nick self.title = title self.comp = comp self.addr = addr self.home", "= ayear self.secaddr = secaddr self.secphone = secphone self.note = note self.id =", "__init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None, email1=None,", "= nick self.title = title self.comp = comp self.addr = addr self.home =", "id def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self, other): return", "= secphone self.note = note self.id = id def __repr__(self): return \"%s:%s:%s\" %", "amonth self.ayear = ayear self.secaddr = secaddr self.secphone = secphone self.note = note", "self.secphone = secphone self.note = note self.id = id def __repr__(self): return \"%s:%s:%s\"", "self.id == other.id) and self.fname == other.fname and self.lname == other.lname def id_or_max(self):", "secphone self.note = note self.id = id def __repr__(self): return \"%s:%s:%s\" % (self.id,", "def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self, other): return (self.id", "self.fname = fname self.mname = mname self.lname = lname self.nick = nick self.title", "def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None,", "return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self, other): return (self.id is None", "fax self.email1 = email1 self.email2 = email2 self.email3 = email3 self.homepage = homepage", "or self.id == other.id) and self.fname == other.fname and self.lname == other.lname def", "self.lname = lname self.nick = nick self.title = title self.comp = comp self.addr", "title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None,", "byear self.aday = aday self.amonth = amonth self.ayear = ayear self.secaddr = secaddr", "is None or other.id is None or self.id == other.id) and self.fname ==", "\"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self, other): return (self.id is None or", "email3 self.homepage = homepage self.bday = bday self.bmonth = bmonth self.byear = byear", "=None): self.fname = fname self.mname = mname self.lname = lname self.nick = nick", "nick self.title = title self.comp = comp self.addr = addr self.home = home", "self.homepage = homepage self.bday = bday self.bmonth = bmonth self.byear = byear self.aday", "= lname self.nick = nick self.title = title self.comp = comp self.addr =", "self.mobile = mobile self.work = work self.fax = fax self.email1 = email1 self.email2", "self.id = id def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self,", "self.amonth = amonth self.ayear = ayear self.secaddr = secaddr self.secphone = secphone self.note", "(self.id, self.fname, self.lname) def __eq__(self, other): return (self.id is None or other.id is", "id =None): self.fname = fname self.mname = mname self.lname = lname self.nick =", "None or self.id == other.id) and self.fname == other.fname and self.lname == other.lname", "ayear=None, secaddr=None, secphone=None, note=None, id =None): self.fname = fname self.mname = mname 
self.lname", "byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None): self.fname = fname self.mname", "secphone=None, note=None, id =None): self.fname = fname self.mname = mname self.lname = lname", "or other.id is None or self.id == other.id) and self.fname == other.fname and", "addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None,", "homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None): self.fname", "= note self.id = id def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname)", "amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None): self.fname = fname self.mname = mname", "email2 self.email3 = email3 self.homepage = homepage self.bday = bday self.bmonth = bmonth", "class Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None,", "mobile self.work = work self.fax = fax self.email1 = email1 self.email2 = email2", "= byear self.aday = aday self.amonth = amonth self.ayear = ayear self.secaddr =", "secaddr=None, secphone=None, note=None, id =None): self.fname = fname self.mname = mname self.lname =", "comp=None, addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None,", "self.addr = addr self.home = home self.mobile = mobile self.work = work self.fax", "= bday self.bmonth = bmonth self.byear = byear self.aday = aday self.amonth =", "self.bday = bday self.bmonth = bmonth self.byear = byear self.aday = aday self.amonth", "homepage self.bday = bday self.bmonth = bmonth self.byear = byear self.aday = aday", "None or other.id is None or self.id == other.id) and self.fname == other.fname", "and self.lname == other.lname def id_or_max(self): if self.id: return int(self.id) else: return maxsize", "fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None,", "= aday self.amonth = amonth self.ayear = ayear self.secaddr = secaddr self.secphone =", "self.secaddr = secaddr self.secphone = secphone self.note = note self.id = id def", "= comp self.addr = addr self.home = home self.mobile = mobile self.work =", "= homepage self.bday = bday self.bmonth = bmonth self.byear = byear self.aday =", "bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None): self.fname =", "self.note = note self.id = id def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname,", "sys import maxsize class Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None,", "def __eq__(self, other): return (self.id is None or other.id is None or self.id", "= mname self.lname = lname self.nick = nick self.title = title self.comp =", "= amonth self.ayear = ayear self.secaddr = secaddr self.secphone = secphone self.note =", "(self.id is None or other.id is None or self.id == other.id) and self.fname", "self.email1 = email1 self.email2 = email2 self.email3 = email3 self.homepage = homepage self.bday", "self.bmonth = bmonth self.byear = byear self.aday = aday self.amonth = amonth self.ayear", "is None or self.id == other.id) and self.fname == other.fname and self.lname ==", "= id def __repr__(self): return \"%s:%s:%s\" % 
(self.id, self.fname, self.lname) def __eq__(self, other):", "bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None): self.fname = fname", "addr self.home = home self.mobile = mobile self.work = work self.fax = fax", "= fax self.email1 = email1 self.email2 = email2 self.email3 = email3 self.homepage =", "mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None,", "email1 self.email2 = email2 self.email3 = email3 self.homepage = homepage self.bday = bday", "self.fname, self.lname) def __eq__(self, other): return (self.id is None or other.id is None", "ayear self.secaddr = secaddr self.secphone = secphone self.note = note self.id = id", "note self.id = id def __repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def", "self.email2 = email2 self.email3 = email3 self.homepage = homepage self.bday = bday self.bmonth", "== other.id) and self.fname == other.fname and self.lname == other.lname def id_or_max(self): if", "% (self.id, self.fname, self.lname) def __eq__(self, other): return (self.id is None or other.id", "self.fax = fax self.email1 = email1 self.email2 = email2 self.email3 = email3 self.homepage", "fname self.mname = mname self.lname = lname self.nick = nick self.title = title", "home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None,", "mname self.lname = lname self.nick = nick self.title = title self.comp = comp", "from sys import maxsize class Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None,", "= home self.mobile = mobile self.work = work self.fax = fax self.email1 =", "lname self.nick = nick self.title = title self.comp = comp self.addr = addr", "comp self.addr = addr self.home = home self.mobile = mobile self.work = work", "work self.fax = fax self.email1 = email1 self.email2 = email2 self.email3 = email3", "self.ayear = ayear self.secaddr = secaddr self.secphone = secphone self.note = note self.id", "== other.fname and self.lname == other.lname def id_or_max(self): if self.id: return int(self.id) else:", "title self.comp = comp self.addr = addr self.home = home self.mobile = mobile", "note=None, id =None): self.fname = fname self.mname = mname self.lname = lname self.nick", "= mobile self.work = work self.fax = fax self.email1 = email1 self.email2 =", "= work self.fax = fax self.email1 = email1 self.email2 = email2 self.email3 =", "other.fname and self.lname == other.lname def id_or_max(self): if self.id: return int(self.id) else: return", "self.fname == other.fname and self.lname == other.lname def id_or_max(self): if self.id: return int(self.id)", "= bmonth self.byear = byear self.aday = aday self.amonth = amonth self.ayear =", "self.lname) def __eq__(self, other): return (self.id is None or other.id is None or", "nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None,", "self.work = work self.fax = fax self.email1 = email1 self.email2 = email2 self.email3", "self.email3 = email3 self.homepage = homepage self.bday = bday self.bmonth = bmonth self.byear", "__repr__(self): return \"%s:%s:%s\" % (self.id, self.fname, self.lname) def __eq__(self, other): return (self.id is", "import maxsize class Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, 
title=None, comp=None, addr=None,", "= email2 self.email3 = email3 self.homepage = homepage self.bday = bday self.bmonth =", "other.id) and self.fname == other.fname and self.lname == other.lname def id_or_max(self): if self.id:", "and self.fname == other.fname and self.lname == other.lname def id_or_max(self): if self.id: return", "email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id =None):", "= addr self.home = home self.mobile = mobile self.work = work self.fax =", "secaddr self.secphone = secphone self.note = note self.id = id def __repr__(self): return", "bmonth self.byear = byear self.aday = aday self.amonth = amonth self.ayear = ayear", "= email3 self.homepage = homepage self.bday = bday self.bmonth = bmonth self.byear =", "self.aday = aday self.amonth = amonth self.ayear = ayear self.secaddr = secaddr self.secphone", "other): return (self.id is None or other.id is None or self.id == other.id)", "aday self.amonth = amonth self.ayear = ayear self.secaddr = secaddr self.secphone = secphone", "mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None,", "self.mname = mname self.lname = lname self.nick = nick self.title = title self.comp", "email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None, id", "return (self.id is None or other.id is None or self.id == other.id) and", "bday self.bmonth = bmonth self.byear = byear self.aday = aday self.amonth = amonth", "self.home = home self.mobile = mobile self.work = work self.fax = fax self.email1", "other.id is None or self.id == other.id) and self.fname == other.fname and self.lname", "home self.mobile = mobile self.work = work self.fax = fax self.email1 = email1", "self.byear = byear self.aday = aday self.amonth = amonth self.ayear = ayear self.secaddr", "self.title = title self.comp = comp self.addr = addr self.home = home self.mobile", "maxsize class Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None,", "= fname self.mname = mname self.lname = lname self.nick = nick self.title =", "lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None, fax=None, email1=None, email2=None, email3=None, homepage=None,", "email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None, note=None,", "= title self.comp = comp self.addr = addr self.home = home self.mobile =", "= secaddr self.secphone = secphone self.note = note self.id = id def __repr__(self):", "fax=None, email1=None, email2=None, email3=None, homepage=None, bday=None, bmonth=None, byear=None, aday=None, amonth=None, ayear=None, secaddr=None, secphone=None,", "__eq__(self, other): return (self.id is None or other.id is None or self.id ==", "Contact: def __init__(self, fname=None, mname=None, lname=None, nick=None, title=None, comp=None, addr=None, home=None, mobile=None, work=None,", "self.comp = comp self.addr = addr self.home = home self.mobile = mobile self.work", "= email1 self.email2 = email2 self.email3 = email3 self.homepage = homepage self.bday =" ]
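A short usage sketch (the sample names and ids are made up) of how id_or_max and __eq__ behave: contacts without an id sort after those with one, and comparison ignores every field except id, fname and lname.

contacts = [
    Contact(fname="Ada", lname="Lovelace", id="2"),
    Contact(fname="Alan", lname="Turing"),           # no id assigned yet
    Contact(fname="Grace", lname="Hopper", id="1"),
]

# id_or_max returns int(id) when present and sys.maxsize otherwise,
# so contacts that have not been assigned an id sort to the end.
contacts.sort(key=Contact.id_or_max)

# __eq__ compares only id, fname and lname; a missing id on either side
# acts as a wildcard, so these two objects compare equal.
assert Contact(fname="Ada", lname="Lovelace") == Contact(fname="Ada", lname="Lovelace", id="2")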
[ ") self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )", "tearDown( self ) : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\"", ") classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors( self", "test the 'loadability' later... preset.save( savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\",", "SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT", "] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self ) : p = IECore.Parameterised(", "] self.assertNotEqual( classes1, classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo(", "\"\", 0.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\",", "IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData() ), ] ) ]", "WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND", "\"basicPresetTest.cob\" ) ) ) # reload p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" )", ") def tearDown( self ) : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ),", "All rights reserved. # # Redistribution and use in source and binary forms,", "ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "\"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj,", ") p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )", ": testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ),", "testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self", ") ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() )", "nor the names of any # other contributors to this software may be", "\"AS # IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED", "\"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\"", "os.path.isdir( p ) : shutil.rmtree( p ) elif os.path.isfile( p ) : os.remove(", "of any # other contributors to this software may be used to endorse", "no messages are emitted during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader", "its there preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" )", "False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) savePath = os.path.abspath( os.path.join(", "True ) ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self ) : p", "STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY", "IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), 
IECore.FloatParameter( \"c\", \"\",", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED.", ": if os.path.isdir( p ) : shutil.rmtree( p ) elif os.path.isfile( p )", "self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse(", "and check its there preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath,", ") self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj,", "os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) # save without the classLoader", "testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = testObj.parameters()[\"b\"].getClass( True", "CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "* Redistributions of source code must retain the above copyright # notice, this", "THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #", ") ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" ) #", "= IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len(", "loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset", ") p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = [ c[1:] for c in testObj.parameters()[\"b\"].getClasses(", "testCompoundVectorParameter( self ) : p = IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\",", "__file__ ), \"data\", \"basicPreset\" ) ) paths = ( os.path.join( savePath, \"basicPresetTest\" ),", "LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ), ] ) testObj2", "[ IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter( \"c\", \"\", members = [ IECore.StringVectorParameter(", "# modification, are permitted provided that the following conditions are # met: #", "elif os.path.isfile( p ) : os.remove( p ) if __name__ == \"__main__\": unittest.main()", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE", ": savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) paths", "IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse(", "p ) : shutil.rmtree( p ) elif os.path.isfile( p ) : os.remove( p", "Design Inc. All rights reserved. 
# # Redistribution and use in source and", "paths : if os.path.isdir( p ) : shutil.rmtree( p ) elif os.path.isfile( p", "(INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT", "IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter( \"b\", \"\",", "] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\"", "[ c[1:] for c in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [ c[1:]", "c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self", "and the following disclaimer. # # * Redistributions in binary form must reproduce", "conditions and the following disclaimer in the # documentation and/or other materials provided", "True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors(", "IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ), 0 )", "\"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 =", "self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj,", "testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ), ]", "used to endorse or # promote products derived from this software without specific", "\"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) )", ") preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def tearDown( self )", "p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj,", "] classes2 = [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual(", "\"\", True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ),", "True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p =", "testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.FloatParameter(", "os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with messageHandler", "loader = IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual(", ") # Save for the classLoader and check its there, we test the", "\"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ), ] ) testObj2 =", "OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", "# Copyright (c) 2010-2012, Image Engine Design Inc. All rights reserved. 
# #", "\"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters()", "len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset ) ) p.metadata() def", "True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ), ]", "\"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass(", "self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass(", "), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\",", "notice, this list of conditions and the following disclaimer in the # documentation", "testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() )", "SUCH DAMAGE. # ########################################################################## from __future__ import with_statement import os import sys import", "Engine Design Inc. All rights reserved. # # Redistribution and use in source", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND", "), \"data\", \"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with messageHandler : p =", ") savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) preset", "without # modification, are permitted provided that the following conditions are # met:", "os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with messageHandler :", "testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave(", "classes2 = [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1,", "savePath, \"basicPresetTestClassLoader\" ) # make sure that no messages are emitted during loading", "testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters()", "preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue(", "\"\", 1.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\",", "self ) : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" )", "p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False", ") # reload p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo(", "), os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p in paths : if os.path.isdir(", ") ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() )", "IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" ) # make sure that no", "classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) 
self.assertNotEqual( classes1[1:], classes2[1:]", ") testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = testObj.parameters()[\"b\"].getClass(", "with or without # modification, are permitted provided that the following conditions are", "INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, #", "Save for the classLoader and check its there, we test the 'loadability' later...", "savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) preset =", "\"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) p = IECore.BasicPreset(", "IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo( testObj,", "in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self )", ") testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ),", "self.assertEqual( p.parameters().getValue(), v ) def tearDown( self ) : savePath = os.path.abspath( os.path.join(", "= p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() )", "\"c\", \"\", members = [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\",", "(c) 2010-2012, Image Engine Design Inc. All rights reserved. # # Redistribution and", "def testCopy( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter(", "), \"data\", \"basicPreset\" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) # Save", "NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT", "\"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ), ] ) testObj2 = IECore.Parameterised(", ") self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) )", ") ) paths = ( os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ),", "[ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2 )", ") testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ),", "), IECore.CompoundVectorParameter( \"c\", \"\", members = [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter(", "OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE,", "for the classLoader and check its there, we test the 'loadability' later... preset.save(", ") ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter( self ) : p =", "software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED", "and/or other materials provided with the distribution. 
# # * Neither the name", "classLoadable=False ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) # reload p", "BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN", "\"basicPresetTest-1.py\" ) ) ) # save without the classLoader and check its there", "\"data\", \"basicPreset\" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters() ) # Save for", ") paths = ( os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join(", "IECore.BoolVectorData() ), ] ) ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\" ]", "os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\"", "self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors( self ) : testObj = IECore.Parameterised( \"testParameterised1\"", "p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def", ") preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\"", "= IECore.BasicPreset( testObj, testObj.parameters() ) # Save for the classLoader and check its", "testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters()", ") self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self )", "above copyright # notice, this list of conditions and the following disclaimer. #", "testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader( self ) : testObj", "Engine Design nor the names of any # other contributors to this software", "CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL", "the classLoader and check its there preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue( os.path.isfile(", ") self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join(", "\"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p in paths : if", "\"basicPresetTestClassLoader\" ), ) for p in paths : if os.path.isdir( p ) :", "rights reserved. # # Redistribution and use in source and binary forms, with", "True ) ] self.assertNotEqual( classes1, classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] )", "conditions and the following disclaimer. # # * Redistributions in binary form must", "# documentation and/or other materials provided with the distribution. # # * Neither", "documentation and/or other materials provided with the distribution. # # * Neither the", "classLoader and check its there, we test the 'loadability' later... preset.save( savePath, \"basicPresetTest\"", "# # * Neither the name of Image Engine Design nor the names", "permission. 
# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS", "Redistributions in binary form must reproduce the above copyright # notice, this list", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF", "# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR #", ") self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], )", "0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() )", "savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath,", "p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ),", "# make sure that no messages are emitted during loading messageHandler = IECore.CapturingMessageHandler()", "savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) # save without the classLoader and check", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR", "p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset( p,", "or without # modification, are permitted provided that the following conditions are #", "THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "messages are emitted during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader =", "classes2[1:] ) def testClassVectors( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters(", ") self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader( self ) : testObj = IECore.Parameterised(", "(INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS", "testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters()", ") self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) # reload p =", "loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath )", ") ) messageHandler = IECore.CapturingMessageHandler() with messageHandler : p = IECore.BasicPreset( os.path.join( savePath,", "OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE", "LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p in paths :", ") messageHandler = IECore.CapturingMessageHandler() with messageHandler : p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\",", "0.0 ), ] ) p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj,", "os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) # reload p = IECore.BasicPreset( os.path.join(", "p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True )", "), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname(", "testObj2 = 

class TestBasicPreset( unittest.TestCase ) :

	def testCopy( self ) :

		testObj = IECore.Parameterised( "testParameterised1" )
		testObj.parameters().addParameters(
			[
				IECore.BoolParameter( "a", "", True ),
				IECore.FloatParameter( "b", "", 1.0 ),
			]
		)

		testObj2 = IECore.Parameterised( "testParameterised2" )
		testObj2.parameters().addParameters(
			[
				IECore.BoolParameter( "a", "", False ),
				IECore.FloatParameter( "c", "", 0.0 ),
			]
		)

		p = IECore.BasicPreset( testObj, testObj.parameters() )

		self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )
		self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) )

		testObj.parameters()["a"].setTypedValue( False )
		testObj.parameters()["b"].setTypedValue( 0.0 )

		p( testObj, testObj.parameters() )

		self.assertEqual( testObj.parameters()["a"].getTypedValue(), True )
		self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 )

		p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()["a"], ) )

		self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) )
		self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) )

		p2( testObj2, testObj2.parameters() )

		self.assertEqual( testObj2.parameters()["a"].getTypedValue(), True )
		self.assertEqual( testObj2.parameters()["c"].getTypedValue(), 0.0 )

	def testLoad( self ) :

		testObj = IECore.Parameterised( "testParameterised1" )
		testObj.parameters().addParameters(
			[
				IECore.BoolParameter( "a", "", True ),
				IECore.FloatParameter( "b", "", 1.0 ),
			]
		)

		testObj2 = IECore.Parameterised( "testParameterised1" )
		testObj2.parameters().addParameters(
			[
				IECore.BoolParameter( "a", "", False ),
				IECore.FloatParameter( "c", "", 0.0 ),
			]
		)

		savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) )

		messageHandler = IECore.CapturingMessageHandler()
		with messageHandler :
			p = IECore.BasicPreset( os.path.join( savePath, "basicPresetLoadTest", "basicPresetLoadTest-1.cob" ) )

		self.assertEqual( len( messageHandler.messages ), 0 )

		self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )
		self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) )

		testObj.parameters()["a"].setTypedValue( False )
		testObj.parameters()["b"].setTypedValue( 0.0 )

		p( testObj, testObj.parameters() )

		self.assertEqual( testObj.parameters()["a"].getTypedValue(), True )
		self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 )
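
	# The next test saves presets in both of the formats BasicPreset.save()
	# supports.  This helper is an illustrative sketch only (it is not part of
	# the original suite and is never called); the paths it returns simply
	# mirror the files testSave() asserts on below.
	def _savedPresetPathsSketch( self, savePath, name ) :

		# classLoadable=True (the default) writes a versioned class directory:
		#   <savePath>/<name>/<name>-1.cob  and  <savePath>/<name>/<name>-1.py
		classLoadablePaths = (
			os.path.join( savePath, name, name + "-1.cob" ),
			os.path.join( savePath, name, name + "-1.py" ),
		)

		# classLoadable=False writes a single flat file: <savePath>/<name>.cob
		flatPath = os.path.join( savePath, name + ".cob" )

		return classLoadablePaths, flatPath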

	def testSave( self ) :

		testObj = IECore.Parameterised( "testParameterised1" )
		testObj.parameters().addParameters(
			[
				IECore.BoolParameter( "a", "", True ),
				IECore.FloatParameter( "b", "", 1.0 ),
			]
		)

		testObj2 = IECore.Parameterised( "testParameterised1" )
		testObj2.parameters().addParameters(
			[
				IECore.BoolParameter( "a", "", False ),
				IECore.FloatParameter( "c", "", 0.0 ),
			]
		)

		savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) )

		preset = IECore.BasicPreset( testObj, testObj.parameters() )

		# Save for the classLoader and check it's there; we test the 'loadability' later...
		preset.save( savePath, "basicPresetTest" )
		self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest", "basicPresetTest-1.cob" ) ) )
		self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest", "basicPresetTest-1.py" ) ) )

		# save without the classLoader and check it's there
		preset.save( savePath, "basicPresetTest", classLoadable=False )
		self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest.cob" ) ) )

		# reload
		p = IECore.BasicPreset( os.path.join( savePath, "basicPresetTest.cob" ) )

		self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )
		self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) )

		testObj.parameters()["a"].setTypedValue( False )
		testObj.parameters()["b"].setTypedValue( 0.0 )

		p( testObj, testObj.parameters() )

		self.assertEqual( testObj.parameters()["a"].getTypedValue(), True )
		self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 )

		preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()["a"], ) )
		preset2.save( savePath, "basicPresetTest2", classLoadable=False )

		# reload
		p2 = IECore.BasicPreset( os.path.join( savePath, "basicPresetTest2.cob" ) )

		self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) )
		self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) )

		p2( testObj2, testObj2.parameters() )

		self.assertEqual( testObj2.parameters()["a"].getTypedValue(), True )
		self.assertEqual( testObj2.parameters()["c"].getTypedValue(), 0.0 )
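
	# testClassLoader() below relies on the classLoadable save format: once a
	# preset has been written with save( path, name ), it can be instantiated
	# through IECore.ClassLoader like any other class.  A minimal sketch,
	# assuming only the calls the test itself makes; this helper is not part
	# of the original suite and is never invoked.
	def _loadPresetViaClassLoaderSketch( self, savePath, name ) :

		loader = IECore.ClassLoader( IECore.SearchPath( savePath ) )
		# load() returns the class; calling it gives the BasicPreset instance
		return loader.load( name )()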

	def testClassLoader( self ) :

		testObj = IECore.Parameterised( "testParameterised1" )
		testObj.parameters().addParameters(
			[
				IECore.BoolParameter( "a", "", True ),
				IECore.FloatParameter( "b", "", 1.0 ),
			]
		)

		savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) )

		preset = IECore.BasicPreset( testObj, testObj.parameters() )
		preset.save( savePath, "basicPresetTestClassLoader" )

		# make sure that no messages are emitted during loading
messageHandler = IECore.CapturingMessageHandler() with", "\"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile(", "LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR", "= [ c[1:] for c in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [", "LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE)", "] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False", "testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual(", "False, True ] ) ) v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters()", "IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) savePath", "following disclaimer in the # documentation and/or other materials provided with the distribution.", "SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## from", "without the classLoader and check its there preset.save( savePath, \"basicPresetTest\", classLoadable=False ) self.assertTrue(", ") ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0", "\"basicPresetTest2\", classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue(", "disclaimer. # # * Redistributions in binary form must reproduce the above copyright", ") v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p,", "self.assertNotEqual( p.parameters().getValue(), v ) preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def", "p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2", ") def testClassLoader( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [", "savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue(", "[ \"1\", \"2\", \"3\" ] ) ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True, False, True", "= IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False )", "IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload", ") preset = IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" ) # make", "c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1, classes2 ) p = IECore.BasicPreset(", "contributors to this software may be used to endorse or # promote products", "testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] )", "), 2 ), ] ) testObj2 = IECore.Parameterised( 
\"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter(", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "Image Engine Design nor the names of any # other contributors to this", "IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters(", "LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR #", ") classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertEqual( classes1[1:],", "prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", ") testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(),", "True ), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\"", "] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\" ] ) ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData(", "# # * Redistributions in binary form must reproduce the above copyright #", "this list of conditions and the following disclaimer. # # * Redistributions in", "########################################################################## from __future__ import with_statement import os import sys import shutil import unittest", "IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) p = IECore.BasicPreset( testObj, testObj.parameters() )", "len( messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo(", "of conditions and the following disclaimer in the # documentation and/or other materials", ") ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0", ") ) # reload p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest.cob\" ) ) self.assertTrue(", "for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2 ) def testCompoundVectorParameter(", ") ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual(", "os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) preset = IECore.BasicPreset( testObj, testObj.parameters()", "[ True, False, True ] ) ) v = p.parameters().getValue().copy() preset = IECore.BasicPreset(", "p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() ) )", "p.parameters().getValue(), v ) preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def tearDown(", "os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue(", "\"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) )", "messageHandler.messages ), 0 ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2,", "OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 
CAUSED AND ON", "\"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset )", "] ) classes1 = [ c[1:] for c in testObj.parameters()[\"b\"].getClasses( True ) ]", "testObj.parameters()[\"b\"].setClasses( [ ( \"mult\", os.path.join( \"maths\", \"multiply\" ), 2 ), ( \"coIO\", \"compoundObjectInOut\",", "IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter( \"c\", \"\",", "EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES", "os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) # save without the classLoader and", "\"\", members = [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData()", "os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join(", "True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testLoad( self ) : testObj =", "] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\"", ") self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 =", "= [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2", ") self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], )", "testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader(", "p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter( \"c\", \"\", members = [", ") preset = IECore.BasicPreset( testObj, testObj.parameters() ) # Save for the classLoader and", "2010-2012, Image Engine Design Inc. All rights reserved. 
# # Redistribution and use", "EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "emitted during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath(", "during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath", "* Neither the name of Image Engine Design nor the names of any", "0 ) self.assertTrue( isinstance( p, IECore.BasicPreset ) ) p.metadata() def testClasses( self )", "testSave( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\",", "p = IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.CompoundVectorParameter(", "provided that the following conditions are # met: # # * Redistributions of", "preset = IECore.BasicPreset( testObj, testObj.parameters() ) preset.save( savePath, \"basicPresetTestClassLoader\" ) # make sure", "IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ),", "IECore class TestBasicPreset( unittest.TestCase ) : def testCopy( self ) : testObj =", "IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, #", "\"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) p =", "the # documentation and/or other materials provided with the distribution. # # *", "with messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load(", "c in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [ c[1:] for c in", "p, p.parameters() ) self.assertTrue( preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual(", "0.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\"", "\"\", True ), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) savePath = os.path.abspath(", "def tearDown( self ) : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\",", "), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p in", "savePath, \"basicPresetTest.cob\" ) ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2,", "testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = [ c[1:] for", "p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass( True", ") #reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj,", "), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" )", "with_statement import os import sys import shutil import unittest import IECore class TestBasicPreset(", "met: # # * Redistributions of source code must retain the above copyright", "testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) self.assertTrue(", "THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE", ") testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ 
IECore.BoolParameter( \"a\", \"\", False ),", "#reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters()", ": loader = IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load( \"basicPresetTestClassLoader\" )()", "\"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 =", "in paths : if os.path.isdir( p ) : shutil.rmtree( p ) elif os.path.isfile(", "PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR", "= [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData() ), IECore.BoolVectorParameter( \"b\", \"\", IECore.BoolVectorData() ), ]", "\"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) savePath =", "False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) p = IECore.BasicPreset( testObj,", "testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True", "IECore.FloatParameter( \"c\", \"\", 0.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( __file__", ") ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) #", "IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF", "AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL", "self.assertTrue( isinstance( p, IECore.BasicPreset ) ) p.metadata() def testClasses( self ) : testObj", "True ), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) savePath = os.path.abspath( os.path.join(", "= testObj.parameters()[\"b\"].getClass( True ) classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] )", "\"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\",", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF", "os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) # reload p = IECore.BasicPreset( os.path.join( savePath,", "p.parameters().getValue(), v ) def tearDown( self ) : savePath = os.path.abspath( os.path.join( os.path.dirname(", "os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with", "\"\", False ), IECore.CompoundVectorParameter( \"c\", \"\", members = [ IECore.StringVectorParameter( \"s\", \"\", IECore.StringVectorData()", ") preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\",", "\"coIO\", \"compoundObjectInOut\", 1 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [", "testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\",", "unittest.TestCase ) : def testCopy( self ) : testObj = IECore.Parameterised( \"testParameterised1\" )", "True ) self.assertEqual( classes1[1:], classes2[1:] ) def testClassVectors( self ) : testObj =", "os.path.abspath( os.path.join( os.path.dirname( __file__ ), \"data\", \"basicPreset\" ) ) paths = ( os.path.join(", "the following disclaimer in the # documentation and/or other 
materials provided with the", "p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v", "0.0 ) def testLoad( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters(", "True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"],", "with messageHandler : p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual(", "must reproduce the above copyright # notice, this list of conditions and the", "testObj.parameters() ) ) self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) ) testObj.parameters()[\"a\"].setTypedValue( False ) testObj.parameters()[\"b\"].setTypedValue(", ") self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(),", "os.path.join( \"maths\", \"multiply\" ), 2 ), ( \"coIO\", \"compoundObjectInOut\", 1 ), ] )", ") ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.py\" ) ) ) # save", "True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self ) : testObj =", "classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] )", "OF SUCH DAMAGE. # ########################################################################## from __future__ import with_statement import os import sys", "classes2 = [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1,", "= [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1, classes2", "classes2[1:] ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] )", "self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) preset2 = IECore.BasicPreset( testObj, testObj.parameters(),", "= ( os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\"", "\"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter(", "classes1, classes2 ) p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"]", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY,", "TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR", ") ) ) # save without the classLoader and check its there preset.save(", "classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset( testObj,", "IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN", ") def testLoad( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [", ") self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) def testSave( self ) : testObj = IECore.Parameterised(", "1.0 ), ] ) savePath = os.path.abspath( os.path.join( os.path.dirname( 
__file__ ), \"data\", \"basicPreset\"", "True, False, True ] ) ) v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p,", "( os.path.join( savePath, \"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ),", "testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo( testObj, testObj.parameters() )", "self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(),", "\"\", IECore.BoolVectorData() ), ] ) ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\"", "are # met: # # * Redistributions of source code must retain the", "list of conditions and the following disclaimer in the # documentation and/or other", "there, we test the 'loadability' later... preset.save( savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join(", "THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## from __future__ import with_statement import os", "), ] ) p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()", "\"multiply\" ), 2 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [", "classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo(", "self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest.cob\" ) ) ) # reload p = IECore.BasicPreset(", "os.path.join( savePath, \"basicPresetTestClassLoader\" ), ) for p in paths : if os.path.isdir( p", "# written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "preset( p, p.parameters() ) self.assertEqual( p.parameters().getValue(), v ) def tearDown( self ) :", "retain the above copyright # notice, this list of conditions and the following", "copyright # notice, this list of conditions and the following disclaimer. # #", "False ) testObj.parameters()[\"b\"].setTypedValue( 0.0 ) p( testObj, testObj.parameters() ) self.assertEqual( testObj.parameters()[\"a\"].getTypedValue(), True )", "\"mult\", os.path.join( \"maths\", \"multiply\" ), 2 ), ( \"coIO\", \"compoundObjectInOut\", 1 ), ]", "p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset( p, p.parameters()", "# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF", "testClassVectors( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\",", "sure that no messages are emitted during loading messageHandler = IECore.CapturingMessageHandler() with messageHandler", "isinstance( p, IECore.BasicPreset ) ) p.metadata() def testClasses( self ) : testObj =", ") ) self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()[\"c\"] ) ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1", "if os.path.isdir( p ) : shutil.rmtree( p ) elif os.path.isfile( p ) :", "] ) p = IECore.BasicPreset( testObj, testObj.parameters() ) self.assertTrue( p.applicableTo( testObj, testObj.parameters() )", "reserved. 
# # Redistribution and use in source and binary forms, with or", "# Redistribution and use in source and binary forms, with or without #", ") def testClassVectors( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [", "IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", True ), IECore.ClassVectorParameter( \"b\", \"\",", "1 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\",", "LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "self.assertTrue( preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v )", ") ) p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0 )", "that the following conditions are # met: # # * Redistributions of source", "p2 = IECore.BasicPreset( os.path.join( savePath, \"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() )", "] ) ) v = p.parameters().getValue().copy() preset = IECore.BasicPreset( p, p.parameters() ) self.assertTrue(", "), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised1\" )", "), IECore.ClassParameter( \"b\", \"\", \"IECORE_OP_PATHS\", os.path.join( \"maths\", \"multiply\" ), 2 ), ] )", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS", "IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR", ") self.assertTrue( isinstance( p, IECore.BasicPreset ) ) p.metadata() def testClasses( self ) :", "in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [ c[1:] for c in testObj2.parameters()[\"c\"].getClasses(", ": p = IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ),", "), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\" ),", ": def testCopy( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [", "NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,", "USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "\"a\", \"\", True ), IECore.FloatParameter( \"b\", \"\", 1.0 ), ] ) savePath =", "[ c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1, classes2 )", "classes2 ) def testCompoundVectorParameter( self ) : p = IECore.Parameterised( \"test\" ) p.parameters().addParameters(", ") p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"], ) ) self.assertTrue( p2.applicableTo( testObj,", ") testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ),", ") # save without the classLoader and check its there preset.save( savePath, \"basicPresetTest\",", "for c in testObj.parameters()[\"b\"].getClasses( True ) ] classes2 = [ c[1:] for c", "= IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\",", "IECore.CapturingMessageHandler() with 
messageHandler : p = IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) )", ") p = IECore.BasicPreset( testObj, testObj.parameters()[\"b\"] ) self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) )", "IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ), ] ) testObj.parameters()[\"b\"].setClasses( [ ( \"mult\", os.path.join( \"maths\",", "parameters=( testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload p2 = IECore.BasicPreset(", "# * Neither the name of Image Engine Design nor the names of", "] ) ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\" ] ) )", "binary forms, with or without # modification, are permitted provided that the following", "of source code must retain the above copyright # notice, this list of", ") ] ) p[\"c\"][\"s\"].setValue( IECore.StringVectorData( [ \"1\", \"2\", \"3\" ] ) ) p[\"c\"][\"b\"].setValue(", "OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY,", "ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN", "[ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0 ), ] )", "# promote products derived from this software without specific prior # written permission.", "testObj.parameters()[\"a\"], ) ) preset2.save( savePath, \"basicPresetTest2\", classLoadable=False ) #reload p2 = IECore.BasicPreset( os.path.join(", ") classes2 = testObj2.parameters()[\"c\"].getClass( True ) self.assertNotEqual( classes1[1:], classes2[1:] ) p = IECore.BasicPreset(", ") self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) )", "# save without the classLoader and check its there preset.save( savePath, \"basicPresetTest\", classLoadable=False", "\"multiply\" ), 2 ), ( \"coIO\", \"compoundObjectInOut\", 1 ), ] ) testObj2 =", "we test the 'loadability' later... preset.save( savePath, \"basicPresetTest\" ) self.assertTrue( os.path.isfile( os.path.join( savePath,", "v ) def tearDown( self ) : savePath = os.path.abspath( os.path.join( os.path.dirname( __file__", ") ) p( testObj2, testObj2.parameters()[\"c\"] ) classes1 = testObj.parameters()[\"b\"].getClass( True ) classes2 =", "EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ########################################################################## from __future__", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS", "\"basicPresetTest\" ), os.path.join( savePath, \"basicPresetTest.cob\" ), os.path.join( savePath, \"basicPresetTest2.cob\" ), os.path.join( savePath, \"basicPresetTestClassLoader\"", "def testClassLoader( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter(", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF", "), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\",", "from __future__ import with_statement import os import sys import shutil import unittest import", "def testCompoundVectorParameter( self ) : p = IECore.Parameterised( \"test\" ) p.parameters().addParameters( [ IECore.BoolParameter(", "= IECore.BasicPreset( os.path.join( savePath, \"basicPresetLoadTest\", \"basicPresetLoadTest-1.cob\" ) ) self.assertEqual( len( messageHandler.messages ), 0", "os.path.isfile( os.path.join( savePath, \"basicPresetTest\", \"basicPresetTest-1.cob\" ) ) ) self.assertTrue( os.path.isfile( os.path.join( savePath, \"basicPresetTest\",", "def testLoad( self ) : testObj = IECore.Parameterised( \"testParameterised1\" ) testObj.parameters().addParameters( [ IECore.BoolParameter(", ")() self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p, IECore.BasicPreset ) )", "\"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\", 0.0", "# other contributors to this software may be used to endorse or #", "IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False ), IECore.FloatParameter( \"c\", \"\",", "True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()[\"a\"],", "# ########################################################################## from __future__ import with_statement import os import sys import shutil import", "the above copyright # notice, this list of conditions and the following disclaimer.", "\"\", 1.0 ), ] ) testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter(", "\"basicPresetTest2.cob\" ) ) self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue( p2.applicableTo( testObj2, testObj.parameters()", "), ] ) testObj2 = IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassVectorParameter( \"c\", \"\",", "provided with the distribution. # # * Neither the name of Image Engine", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS # IS\" AND ANY EXPRESS OR", "self.assertTrue( p.applicableTo( testObj, testObj.parameters()[\"b\"] ) ) self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) ) self.assertTrue(", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. 
IN NO", "] ) testObj2 = IECore.Parameterised( \"testParameterised1\" ) testObj2.parameters().addParameters( [ IECore.BoolParameter( \"a\", \"\", False", "savePath ) ) p = loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0", "messageHandler : loader = IECore.ClassLoader( IECore.SearchPath( savePath ) ) p = loader.load( \"basicPresetTestClassLoader\"", "GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION)", "\"basicPreset\" ) ) messageHandler = IECore.CapturingMessageHandler() with messageHandler : p = IECore.BasicPreset( os.path.join(", "c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertNotEqual( classes1, classes2 ) p", "########################################################################## # # Copyright (c) 2010-2012, Image Engine Design Inc. All rights reserved.", "# # * Redistributions of source code must retain the above copyright #", "OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS", ") ) p[\"c\"][\"b\"].setValue( IECore.BoolVectorData( [ True, False, True ] ) ) v =", "os import sys import shutil import unittest import IECore class TestBasicPreset( unittest.TestCase )", "preset.save( savePath, \"basicPresetTestClassLoader\" ) # make sure that no messages are emitted during", "INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "testObj2, testObj.parameters() ) ) p2( testObj2, testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual(", "written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "testObj2.parameters() ) self.assertEqual( testObj2.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj2.parameters()[\"c\"].getTypedValue(), 0.0 ) def testClassLoader( self", "# IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "preset.applicableTo( p, p.parameters() ) ) p.parameters().setValue( p.parameters().defaultValue ) self.assertNotEqual( p.parameters().getValue(), v ) preset(", "= loader.load( \"basicPresetTestClassLoader\" )() self.assertEqual( len( messageHandler.messages ), 0 ) self.assertTrue( isinstance( p,", "IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] ) classes1", "copyright # notice, this list of conditions and the following disclaimer in the", "c[1:] for c in testObj2.parameters()[\"c\"].getClasses( True ) ] self.assertEqual( classes1, classes2 ) def", "testObj.parameters()[\"a\"].getTypedValue(), True ) self.assertEqual( testObj.parameters()[\"b\"].getTypedValue(), 1.0 ) p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=(", "\"a\", \"\", True ), IECore.ClassVectorParameter( \"b\", \"\", \"IECORE_OP_PATHS\" ), ] ) testObj.parameters()[\"b\"].setClasses( [", "= IECore.Parameterised( \"testParameterised2\" ) testObj2.parameters().addParameters( [ IECore.ClassParameter( \"c\", \"\", \"IECORE_OP_PATHS\" ), ] )", "following disclaimer. # # * Redistributions in binary form must reproduce the above" ]
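The fragments summarised above repeatedly exercise one small round trip through the IECore.BasicPreset API. The sketch below assembles that round trip using only the calls visible in those fragments; the object name, parameter names, preset name and save path are illustrative, and a writable directory plus an importable IECore Python module are assumed.

import os
import IECore

obj = IECore.Parameterised( "example" )
obj.parameters().addParameters( [
    IECore.BoolParameter( "a", "", True ),
    IECore.FloatParameter( "b", "", 1.0 ),
] )

# Capture the current parameter values as a preset.
preset = IECore.BasicPreset( obj, obj.parameters() )

# Change the values, then check applicability and apply the preset to restore them.
obj.parameters()["a"].setTypedValue( False )
obj.parameters()["b"].setTypedValue( 0.0 )
assert preset.applicableTo( obj, obj.parameters() )
preset( obj, obj.parameters() )

# Save as a single .cob file (classLoadable=False) and reload it from disk.
savePath = "/tmp/basicPreset"  # illustrative; any writable directory will do
if not os.path.isdir( savePath ) :
    os.makedirs( savePath )
preset.save( savePath, "examplePreset", classLoadable=False )
reloaded = IECore.BasicPreset( os.path.join( savePath, "examplePreset.cob" ) )
reloaded( obj, obj.parameters() )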
[ "can be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1,", "Domain from .PacmanPackage import layout, pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay import", "= len(agent_states) - 1 s = np.zeros( 2 + num_ghosts * 3 +", "randomAction) # keep track of eaten stuff for graphics (original code assumes #", "# used in getLayout function f = open(fullname) grid = [line.strip() for line", "+= 1 elif char == \"\\n\": y += 1 x = -1 elif", "- 2]) statespace_limits.append([1, self.layout.height - 2]) # adds ghost x, y locations and", "+= 1 elif char == \"o\": coord = (x, self.layout_copy.height - y) if", "state object # pacman.PacmanRules.applyAction(self.game_state, a) # pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move randomly", "eaten by ghost or out of time, and for success, all food on", "in getLayout function f = open(fullname) grid = [line.strip() for line in f]", "stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents", "1 def _get_state(self): \"\"\" get the internal game state represented as a numpy", "y coordinates as well as the scare time of each ghost (\"scare time\"", "series of dimensions: * [2] The x and y coordinates of pacman *", "given by (2 + 3*ng + nf + nc). **ACTIONS:** Move Pacman [up,", "Move Pacman [up, down, left, right, stay] **REWARD:** See the Berkeley project website", "whether the game should terminate at the given state. (Terminate for failure, ie", "= pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction", "3. **STATE:** The state vector has a series of dimensions: * [2] The", "time pacman or a ghost moves. # s.data.food is the correct food matrix", "a, s=None): if s is not None: errStr = 'ERROR: In Pacman.py, attempted", "assumes # graphics are updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten", "info. .. note:: The visualization runs as fast as your CPU will permit;", "internal game state represented as a numpy array \"\"\" data = self.game_state.data agent_states", "map-dependent, and *ng* can be set as a parameter. 
Based on above, total", "code assumes # graphics are updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten", ".Domain import Domain from .PacmanPackage import layout, pacman, game, ghostAgents from .PacmanPackage import", "any given # state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for", "to slow things down so gameplay is actually visible, de-comment time.sleep() in the", "[] for char in str(self.layout_copy): if char == \".\": data.food[x][y] = bool(s_food[i]) i", "= self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate instance and updates", "= s.data.food for agent in range(len(s.data.agentStates)): s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None", "False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts", "= \"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\" Pacman domain, which acts", "terminate, returns the proper indication to step function. Accounts for scoring changes in", "the board or not *nf* and *nc* are map-dependent, and *ng* can be", "- 2]) statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0, 1]] *", "_get_state(self): \"\"\" get the internal game state represented as a numpy array \"\"\"", "graphics are updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten", "DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return self.state, self.isTerminal(), self.possibleActions() def", "\"\"\" # copies most recent state data = self.game_state.data agent_states = data.agentStates #", "nc). **ACTIONS:** Move Pacman [up, down, left, right, stay] **REWARD:** See the Berkeley", "possible actions pacman can perform at any given # state possibleActions = []", "capsule.) * [nf] binary variables indicating if a food is still on the", "# Puts the file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout =", "grid class DummyGraphics(object): def initialize(self, *arg, **kwargs): pass def update(self, *arg, **kwargs): pass", "future return np.array([0]) # makes an array of possible actions pacman can perform", "5 episodeCap = 1000 #: location of layouts shipped with rlpy default_layout_dir =", "method. **REFERENCE:** This domain is an RLPy wrapper for the implementation from the", "1 return s state = property(_get_state, _set_state) def showDomain(self, a, s=None): if s", "game should terminate, returns the proper indication to step function. 
Accounts for scoring", "'If you do pass a state parameter, ensure it is set to None.'", "format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents =", "vector \"\"\" self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game =", "coord = (x, self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord) i += 1 elif", "The state vector has a series of dimensions: * [2] The x and", "= next_state.generateSuccessor(i, randomAction) # keep track of eaten stuff for graphics (original code", "Internal states accounted for along with scoring and terminal checking. Returns a tuple", "# set ghost position num_ghosts = len(agent_states) - 1 for i in range(1,", "terminal checking. Returns a tuple of form (reward, new state vector, terminal) \"\"\"", "in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self, fullname): # used in getLayout function", "`BerkeleyX/CS188.1x course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For", "the file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) #", "grid = [line.strip() for line in f] f.close() return grid class DummyGraphics(object): def", "ghost info for i in range(num_ghosts): s[2 + i * 3: 2 +", "\"\"\" return self.game_state.data._lose or self.game_state.data._win def _defaultSettings(self): self.ghostNum = 2 self.ghosts = [ghostAgents.RandomGhost(", "and capsules status i = 2 + num_ghosts * 3 x = 0", "of pacman * [3 * ng] the x and y coordinates as well", "s = self.game_state if self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data)", "used by the original pacman package. \"\"\" # copies most recent state data", "failure, ie eaten by ghost or out of time, and for success, all", "= layoutFile # Puts the file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile)", "represented as a numpy array \"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts", "= agent_states[i + 1].scaredTimer # get food and capsules status i = 2", "= (x, self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord) i += 1 elif char", "len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer = None self.gameDisplay =", "you do pass a state parameter, ensure it is set to None.' 
raise", "# Specifies which Pacman world you want self.layoutFile = layoutFile # Puts the", "+= 1 x += 1 return s state = property(_get_state, _set_state) def showDomain(self,", "s and sets the internal game state used by the original pacman package.", "array \"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states) - 1", "get pacman position s[:2] = agent_states[0].configuration.pos # import ipdb; ipdb.set_trace() # get ghost", "_max_scared_time = 39 actions = [\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num = 5", "= [] for char in str(self.layout_copy): if char == \".\": data.food[x][y] = bool(s_food[i])", "= numGhostAgents # Intitializes Pacman game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy", "next_state = next_state.generateSuccessor(i, randomAction) # keep track of eaten stuff for graphics (original", "(original code assumes # graphics are updated after every agent's move) next_state.data._foodEaten =", "\"\"\" Pacman domain, which acts as a wrapper for the Pacman implementation from", "a vector s and sets the internal game state used by the original", "should not matter anyway, maybe clean up in # the future return np.array([0])", "from .PacmanPackage import layout, pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay import numpy", "internal game state used by the original pacman package. \"\"\" # copies most", "for char in str(self.layout_copy): if char == \".\": s[i] = data.food[x][y] i +=", "matter anyway, maybe clean up in # the future return np.array([0]) # makes", "i) # TODO: use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind]", "range(1, num_ghosts + 1): part_s = s[(3 * i) - 1:3 * i]", "use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i,", "internal states when an episode starts, returns a s vector \"\"\" self.game_state =", "\".\": data.food[x][y] = bool(s_food[i]) i += 1 elif char == \"o\": coord =", "def _get_state(self): \"\"\" get the internal game state represented as a numpy array", "[nf] binary variables indicating if a food is still on the board or", "be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height", "# set food and capsules locations s_food = s[(num_ghosts + 1) * 3:]", "[\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class", "3 + 2] = agent_states[i + 1].scaredTimer # get food and capsules status", "set to None.' 
raise Exception(errStr) s = self.game_state if self.gameDisplay is None: self.gameDisplay", "food and capsules locations s_food = s[(num_ghosts + 1) * 3:] x =", "i += 1 elif char == \"\\n\": y += 1 x = -1", "rlpy default_layout_dir = os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join(", "if char == \".\": data.food[x][y] = bool(s_food[i]) i += 1 elif char ==", "terminal, self.possibleActions() def s0(self): \"\"\" re-initializes internal states when an episode starts, returns", "2013, RLPy http://acl.mit.edu/RLPy\" __credits__ = [\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ = \"BSD", "is an RLPy wrapper for the implementation from the `BerkeleyX/CS188.1x course project 3", "self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food)", "every time pacman or a ghost moves. # s.data.food is the correct food", "each ghost (\"scare time\" is how long the ghost remains scared after consuming", "capsules locations s_food = s[(num_ghosts + 1) * 3:] x = 0 y", "# pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move randomly for i in range(1, len(self.game_state.data.agentStates)):", "super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes an array of limits for each dimension", "for failure, ie eaten by ghost or out of time, and for success,", "domain is an RLPy wrapper for the implementation from the `BerkeleyX/CS188.1x course project", "# the future return np.array([0]) # makes an array of possible actions pacman", "and terminal checking. Returns a tuple of form (reward, new state vector, terminal)", "'to showDomain(); Pacman only supports internal states.'\\ 'If you do pass a state", "= agent_states[0].configuration.pos # import ipdb; ipdb.set_trace() # get ghost info for i in", "initialize(self, *arg, **kwargs): pass def update(self, *arg, **kwargs): pass def finalize(self, *arg, **kwargs):", "part_s = s[(3 * i) - 1:3 * i] agent_states[i].configuration.pos = (part_s[0], part_s[1])", "= 0 y = 0 i = 0 data.capsules = [] for char", "* [2] The x and y coordinates of pacman * [3 * ng]", "a = self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs", "len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer =", "= self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman", "given state. Internal states accounted for along with scoring and terminal checking. Returns", "line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number of ghosts", "on map eaten.) 
If game should terminate, returns the proper indication to step", "position num_ghosts = len(agent_states) - 1 for i in range(1, num_ghosts + 1):", "2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules) # get pacman position", "# get ghost info for i in range(num_ghosts): s[2 + i * 3:", "layoutFile # Puts the file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout", "for agent in range(len(s.data.agentStates)): s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten =", "actions from outside the Pacman domain to the given state. Internal states accounted", "TODO: use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state =", "state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in possibleMoves:", "( self.num_total_food + self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype=\"float\") def _set_state(self, s): \"\"\" Takes", "self.num_total_capsules) # get pacman position s[:2] = agent_states[0].configuration.pos # import ipdb; ipdb.set_trace() #", "stay] **REWARD:** See the Berkeley project website below for more info. .. note::", "self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs action \"a\"", "possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self): \"\"\" Checks whether the game should terminate", "are map-dependent, and *ng* can be set as a parameter. Based on above,", "somewhat hacky, but should not matter anyway, maybe clean up in # the", "= (x, self.layout_copy.height - y) if coord in data.capsules: s[i] = 1. i", "2]) statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0, 1]] * (", "self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents = numGhostAgents # Intitializes", "by the original pacman package. \"\"\" # copies most recent state data =", "\"\"\" layoutFile: filename of the map file noise: with this probability pacman makes", "s state = property(_get_state, _set_state) def showDomain(self, a, s=None): if s is not", "self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game = self.game_rules.newGame( self.layout_copy,", "s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten = None # time.sleep(0.1) #", "the given state. 
Internal states accounted for along with scoring and terminal checking.", "x = -1 x += 1 def _get_state(self): \"\"\" get the internal game", "Takes a vector s and sets the internal game state used by the", "- 1:3 * i] agent_states[i].configuration.pos = (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set", "<filename>rlpy/Domains/Pacman.py \"\"\"Pacman game domain.\"\"\" from rlpy.Tools import __rlpy_location__ from .Domain import Domain from", "of each ghost (\"scare time\" is how long the ghost remains scared after", "__rlpy_location__ from .Domain import Domain from .PacmanPackage import layout, pacman, game, ghostAgents from", "statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) # adds ghost x, y", "s.data.food for agent in range(len(s.data.agentStates)): s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten", "fullname): # used in getLayout function f = open(fullname) grid = [line.strip() for", "- 2]) # adds ghost x, y locations and scaredTimer (how long they", "state represented as a numpy array \"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates", "if char == \".\": s[i] = data.food[x][y] i += 1 elif char ==", "**REWARD:** See the Berkeley project website below for more info. .. note:: The", "\"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states) - 1 s", "randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) # keep track", "\"\"\" Applies actions from outside the Pacman domain to the given state. Internal", "pacman x, y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) #", ".PacmanPackage import graphicsDisplay import numpy as np from copy import deepcopy import os", "deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None", "ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time])", "possibleActions(self): if self.isTerminal(): # somewhat hacky, but should not matter anyway, maybe clean", "right, stay] **REWARD:** See the Berkeley project website below for more info. ..", "if self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics", "move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten # scoring in pacman r =", "*ng* can be set as a parameter. Based on above, total dimensionality of", "ghost remains scared after consuming a capsule.) * [nf] binary variables indicating if", "the proper indication to step function. 
Accounts for scoring changes in terminal states.", "3 + 2] = agent_states[i + 1].configuration.pos s[2 + i * 3 +", "numGhostAgents # Intitializes Pacman game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy =", "= len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer", "__rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\"", "down so gameplay is actually visible, de-comment time.sleep() in the showDomain() method. **REFERENCE:**", "+ 3*ng + nf + nc). **ACTIONS:** Move Pacman [up, down, left, right,", "x = -1 elif char == \"o\": coord = (x, self.layout_copy.height - y)", "next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs action \"a\" in", "+ 1) * 3:] x = 0 y = 0 i = 0", "self.game = self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics =", "domain to the given state. Internal states accounted for along with scoring and", "next_state.generateSuccessor(i, randomAction) # keep track of eaten stuff for graphics (original code assumes", "file noise: with this probability pacman makes a random move instead the one", "self._get_state(), terminal, self.possibleActions() def s0(self): \"\"\" re-initializes internal states when an episode starts,", "1 for i in range(1, num_ghosts + 1): part_s = s[(3 * i)", "CPU will permit; to slow things down so gameplay is actually visible, de-comment", "statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0,", "accounted for along with scoring and terminal checking. Returns a tuple of form", "is map-dependent, and given by (2 + 3*ng + nf + nc). **ACTIONS:**", "\"South\", \"West\"] actions_num = 5 episodeCap = 1000 #: location of layouts shipped", "\"\"\" Takes a vector s and sets the internal game state used by", "in data.capsules: s[i] = 1. i += 1 x += 1 return s", "layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile: filename of the map file noise: with", "food and capsules status i = 2 + num_ghosts * 3 x =", "s vector \"\"\" self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game", "game should terminate at the given state. (Terminate for failure, ie eaten by", ".PacmanPackage import layout, pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay import numpy as", "it is set to None.' raise Exception(errStr) s = self.game_state if self.gameDisplay is", "* 3 x = 0 y = 0 for char in str(self.layout_copy): if", "= None # time.sleep(0.1) # Sleep for 0.1 sec def step(self, a): \"\"\"", "* ( self.num_total_food + self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype=\"float\") def _set_state(self, s): \"\"\"", "range(num_ghosts): s[2 + i * 3: 2 + i * 3 + 2]", "website below for more info. .. 
note:: The visualization runs as fast as", "as a wrapper for the Pacman implementation from the BerkeleyX/CS188.1x course project 3.", "0 y = 0 i = 0 data.capsules = [] for char in", "updates # the display every time pacman or a ghost moves. # s.data.food", "to pass a state (s)'\\ 'to showDomain(); Pacman only supports internal states.'\\ 'If", "len(self.game_state.data.agentStates)): if next_state.isWin() or next_state.isLose(): break ghostOptions = pacman.GhostRules.getLegalActions(next_state, i) # TODO: use", "self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate instance and updates #", "\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain):", "= 2 + num_ghosts * 3 x = 0 y = 0 for", "x += 1 def _get_state(self): \"\"\" get the internal game state represented as", "eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2])", "== \".\": data.food[x][y] = bool(s_food[i]) i += 1 elif char == \"o\": coord", "from copy import deepcopy import os import time __copyright__ = \"Copyright 2013, RLPy", "the internal game state used by the original pacman package. \"\"\" # copies", "next_state = next_state_p # pacman performs action \"a\" in current state object #", "2]) statespace_limits.append([1, self.layout.height - 2]) # adds ghost x, y locations and scaredTimer", "\"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\" Pacman domain, which acts as", "well as the scare time of each ghost (\"scare time\" is how long", "break ghostOptions = pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random stream randomAction_ind =", "np.zeros( 2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules) # get pacman", "the domain see the original package in the `Domains/PacmanPackage` folder. \"\"\" _max_scared_time =", "3: 2 + i * 3 + 2] = agent_states[i + 1].configuration.pos s[2", "pacman package. \"\"\" # copies most recent state data = self.game_state.data agent_states =", "True return self.state, self.isTerminal(), self.possibleActions() def possibleActions(self): if self.isTerminal(): # somewhat hacky, but", "self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return", "pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move randomly for i in range(1, len(self.game_state.data.agentStates)): if", "or next_state.isLose(): break ghostOptions = pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random stream", "as fast as your CPU will permit; to slow things down so gameplay", "Returns a tuple of form (reward, new state vector, terminal) \"\"\" if self.random_state.random_sample()", "a parameter. 
Based on above, total dimensionality of state vector is map-dependent, and", "1:3 * i] agent_states[i].configuration.pos = (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food", "game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food", "agent_states[i].scaredTimer = part_s[2] # set food and capsules locations s_food = s[(num_ghosts +", "s0(self): \"\"\" re-initializes internal states when an episode starts, returns a s vector", "= next_state terminal = self.isTerminal() return r, self._get_state(), terminal, self.possibleActions() def s0(self): \"\"\"", "os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000):", "package in the `Domains/PacmanPackage` folder. \"\"\" _max_scared_time = 39 actions = [\"Stop\", \"North\",", "self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False", "binary variables for each capsule indicating if it is still on the board", "from the `BerkeleyX/CS188.1x course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped)", "to step function. Accounts for scoring changes in terminal states. \"\"\" return self.game_state.data._lose", "= None self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes an array of limits", "scoring in pacman r = next_state.data.score - self.game_state.data.score self.game_state = next_state terminal =", "do pass a state parameter, ensure it is set to None.' raise Exception(errStr)", "fast as your CPU will permit; to slow things down so gameplay is", "- y) if s_food[i]: data.capsules.append(coord) i += 1 elif char == \"\\n\": y", "ghost or out of time, and for success, all food on map eaten.)", "return s state = property(_get_state, _set_state) def showDomain(self, a, s=None): if s is", "internal states.'\\ 'If you do pass a state parameter, ensure it is set", "self.game_state.data.score self.game_state = next_state terminal = self.isTerminal() return r, self._get_state(), terminal, self.possibleActions() def", "num_ghosts * 3 x = 0 y = 0 for char in str(self.layout_copy):", "Pacman(Domain): \"\"\" Pacman domain, which acts as a wrapper for the Pacman implementation", "(zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of the domain see the original package in", "long the ghost remains scared after consuming a capsule.) 
* [nf] binary variables", "long they can be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width -", "3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of", "1].configuration.pos s[2 + i * 3 + 2] = agent_states[i + 1].scaredTimer #", "attempted to pass a state (s)'\\ 'to showDomain(); Pacman only supports internal states.'\\", "= False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) #", "each capsule indicating if it is still on the board or not *nf*", "s vector in pacman gamestate instance and updates # the display every time", "agent_states[i].configuration.pos = (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and capsules locations", "= pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout) self.game = self.game_rules.newGame( self.layout_copy, pacman,", "showDomain(self, a, s=None): if s is not None: errStr = 'ERROR: In Pacman.py,", "and capsules locations s_food = s[(num_ghosts + 1) * 3:] x = 0", "-1 x += 1 def _get_state(self): \"\"\" get the internal game state represented", "random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) #", "ghost position num_ghosts = len(agent_states) - 1 for i in range(1, num_ghosts +", "Intitializes Pacman game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy,", "See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of the domain", "possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self): \"\"\" Checks whether the game should terminate at", "locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) # adds ghost x,", "char == \"\\n\": y += 1 x = -1 elif char == \"o\":", "+= 1 def _get_state(self): \"\"\" get the internal game state represented as a", "on above, total dimensionality of state vector is map-dependent, and given by (2", "and y coordinates as well as the scare time of each ghost (\"scare", "self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return self.state, self.isTerminal(), self.possibleActions() def possibleActions(self):", "def _defaultSettings(self): self.ghostNum = 2 self.ghosts = [ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)]", "next_state.isWin() or next_state.isLose(): break ghostOptions = pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random", "str(self.layout_copy): if char == \".\": data.food[x][y] = bool(s_food[i]) i += 1 elif char", "= ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) # keep track of eaten stuff for", "Pacman [up, down, 
left, right, stay] **REWARD:** See the Berkeley project website below", "== \".\": s[i] = data.food[x][y] i += 1 elif char == \"\\n\": y", "of layouts shipped with rlpy default_layout_dir = os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def", "the original pacman package. \"\"\" # copies most recent state data = self.game_state.data", "self.savedtimer = None self.gameDisplay = None self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes", "next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten # scoring in pacman r = next_state.data.score", "if self.isTerminal(): # somewhat hacky, but should not matter anyway, maybe clean up", "by the action \"\"\" self.noise = noise # Specifies which Pacman world you", "def step(self, a): \"\"\" Applies actions from outside the Pacman domain to the", "# get food and capsules status i = 2 + num_ghosts * 3", "layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents = numGhostAgents # Intitializes Pacman game self.game_state", "char == \"\\n\": y += 1 x = -1 x += 1 def", "self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs action \"a\" in current state", "course project 3. **STATE:** The state vector has a series of dimensions: *", "def _tryToLoad(self, fullname): # used in getLayout function f = open(fullname) grid =", "None s._capsuleEaten = None # time.sleep(0.1) # Sleep for 0.1 sec def step(self,", "+ self.num_total_food + self.num_total_capsules) # get pacman position s[:2] = agent_states[0].configuration.pos # import", "\"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile:", "data.agentStates # set pacman position agent_states.configuration.pos = (s[0], s[1]) # set ghost position", "coordinates of pacman * [3 * ng] the x and y coordinates as", "of form (reward, new state vector, terminal) \"\"\" if self.random_state.random_sample() < self.noise: #", "step(self, a): \"\"\" Applies actions from outside the Pacman domain to the given", "x = 0 y = 0 i = 0 data.capsules = [] for", "agent_states.configuration.pos = (s[0], s[1]) # set ghost position num_ghosts = len(agent_states) - 1", "s[1]) # set ghost position num_ghosts = len(agent_states) - 1 for i in", "y) if coord in data.capsules: s[i] = 1. i += 1 x +=", "the ghosts move randomly for i in range(1, len(self.game_state.data.agentStates)): if next_state.isWin() or next_state.isLose():", "= (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and capsules locations s_food", "* [3 * ng] the x and y coordinates as well as the", "= (s[0], s[1]) # set ghost position num_ghosts = len(agent_states) - 1 for", "import layout, pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay import numpy as np", "actually visible, de-comment time.sleep() in the showDomain() method. **REFERENCE:** This domain is an", "pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self): \"\"\"", "parameter, ensure it is set to None.' 
raise Exception(errStr) s = self.game_state if", "of state vector is map-dependent, and given by (2 + 3*ng + nf", "= agent_states[i + 1].configuration.pos s[2 + i * 3 + 2] = agent_states[i", "Pacman only supports internal states.'\\ 'If you do pass a state parameter, ensure", "char in str(self.layout_copy): if char == \".\": s[i] = data.food[x][y] i += 1", "tuple of form (reward, new state vector, terminal) \"\"\" if self.random_state.random_sample() < self.noise:", "Puts the file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content)", "ghost (\"scare time\" is how long the ghost remains scared after consuming a", "= pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList())", "self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules", "or not *nf* and *nc* are map-dependent, and *ng* can be set as", "scare time of each ghost (\"scare time\" is how long the ghost remains", "\"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\"", "x = 0 y = 0 for char in str(self.layout_copy): if char ==", "2] = agent_states[i + 1].configuration.pos s[2 + i * 3 + 2] =", "time, and for success, all food on map eaten.) If game should terminate,", "Makes an array of limits for each dimension in the state vector. statespace_limits", "pacman position agent_states.configuration.pos = (s[0], s[1]) # set ghost position num_ghosts = len(agent_states)", "+ i * 3: 2 + i * 3 + 2] = agent_states[i", "s[2 + i * 3 + 2] = agent_states[i + 1].scaredTimer # get", "see the original package in the `Domains/PacmanPackage` folder. 
\"\"\" _max_scared_time = 39 actions", "given # state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a", "for line in f] f.close() return grid class DummyGraphics(object): def initialize(self, *arg, **kwargs):", "file in line stripped format layout_file_content = self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number", "layout, pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay import numpy as np from", "return grid class DummyGraphics(object): def initialize(self, *arg, **kwargs): pass def update(self, *arg, **kwargs):", "for char in str(self.layout_copy): if char == \".\": data.food[x][y] = bool(s_food[i]) i +=", "= 1000 #: location of layouts shipped with rlpy default_layout_dir = os.path.join( __rlpy_location__,", "os import time __copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\" __credits__ = [\"<NAME>\", \"<NAME>\",", "locations and scaredTimer (how long they can be # eaten) for ghost in", "scaredTimer (how long they can be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1,", "np from copy import deepcopy import os import time __copyright__ = \"Copyright 2013,", "vector s and sets the internal game state used by the original pacman", "stuff for graphics (original code assumes # graphics are updated after every agent's", "binary variables indicating if a food is still on the board or not", "<https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details of the", "the ghost remains scared after consuming a capsule.) * [nf] binary variables indicating", "graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food =", "states accounted for along with scoring and terminal checking. Returns a tuple of", "* [nc] binary variables for each capsule indicating if it is still on", "if next_state.isWin() or next_state.isLose(): break ghostOptions = pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain", "move randomly for i in range(1, len(self.game_state.data.agentStates)): if next_state.isWin() or next_state.isLose(): break ghostOptions", "of ghosts self.numGhostAgents = numGhostAgents # Intitializes Pacman game self.game_state = pacman.GameState() self.game_rules", "sets the internal game state used by the original pacman package. 
\"\"\" #", "0.1 sec def step(self, a): \"\"\" Applies actions from outside the Pacman domain", "sec def step(self, a): \"\"\" Applies actions from outside the Pacman domain to", "self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch", "= graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food", "(Terminate for failure, ie eaten by ghost or out of time, and for", "= self.actions[a] next_state_p = self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs action", "__copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\" __credits__ = [\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"]", "self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate instance", "y coordinates of pacman * [3 * ng] the x and y coordinates", "updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten # scoring", "None self.gameDisplay = None self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes an array", "self.layoutFile = layoutFile # Puts the file in line stripped format layout_file_content =", "showDomain(); Pacman only supports internal states.'\\ 'If you do pass a state parameter,", "s.data.food is the correct food matrix s.data.layout.food = s.data.food for agent in range(len(s.data.agentStates)):", "is still on the board or not *nf* and *nc* are map-dependent, and", "has a series of dimensions: * [2] The x and y coordinates of", "course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more", "vector. 
statespace_limits = [] # adds pacman x, y locations statespace_limits.append([1, self.layout.width -", "elif self._cleanup_graphics: self._cleanup_graphics = False self.gameDisplay.removeAllFood() self.gameDisplay.removeAllCapsules() self.gameDisplay.food = self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules =", "1 x = -1 x += 1 def _get_state(self): \"\"\" get the internal", "indicating if it is still on the board or not *nf* and *nc*", "return np.array([0]) # makes an array of possible actions pacman can perform at", "**ACTIONS:** Move Pacman [up, down, left, right, stay] **REWARD:** See the Berkeley project", "= property(_get_state, _set_state) def showDomain(self, a, s=None): if s is not None: errStr", "starts, returns a s vector \"\"\" self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy", "= len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer = None self.gameDisplay", "_tryToLoad(self, fullname): # used in getLayout function f = open(fullname) grid = [line.strip()", "self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord) i += 1 elif char == \"\\n\":", "\"\\n\": y += 1 x = -1 elif char == \"o\": coord =", "or not * [nc] binary variables for each capsule indicating if it is", "deepcopy import os import time __copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\" __credits__ =", "in range(1, num_ghosts + 1): part_s = s[(3 * i) - 1:3 *", "= deepcopy(self.layout) self.game = self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents)", "on the board or not * [nc] binary variables for each capsule indicating", "This domain is an RLPy wrapper for the implementation from the `BerkeleyX/CS188.1x course", "The x and y coordinates of pacman * [3 * ng] the x", "i in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self, fullname): # used in getLayout", "= [\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num = 5 episodeCap = 1000 #:", "== \"\\n\": y += 1 x = -1 elif char == \"o\": coord", "you want self.layoutFile = layoutFile # Puts the file in line stripped format", "+ nc). **ACTIONS:** Move Pacman [up, down, left, right, stay] **REWARD:** See the", "RLPy wrapper for the implementation from the `BerkeleyX/CS188.1x course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See", "= data.agentStates # set pacman position agent_states.configuration.pos = (s[0], s[1]) # set ghost", "the original package in the `Domains/PacmanPackage` folder. \"\"\" _max_scared_time = 39 actions =", "are updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten #", "which Pacman world you want self.layoutFile = layoutFile # Puts the file in", "\"<NAME>\", \"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\" Pacman", "0) # the ghosts move randomly for i in range(1, len(self.game_state.data.agentStates)): if next_state.isWin()", "+ i * 3 + 2] = agent_states[i + 1].scaredTimer # get food", "of time, and for success, all food on map eaten.) 
If game should", "as a numpy array \"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts =", "a numpy array \"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states)", "food is still on the board or not * [nc] binary variables for", "actions = [\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num = 5 episodeCap = 1000", "self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules =", "x += 1 return s state = property(_get_state, _set_state) def showDomain(self, a, s=None):", "_set_statespace_limits(self): # Makes an array of limits for each dimension in the state", "\"\\n\": y += 1 x = -1 x += 1 def _get_state(self): \"\"\"", "map eaten.) If game should terminate, returns the proper indication to step function.", "with this probability pacman makes a random move instead the one specified by", "wrapper for the Pacman implementation from the BerkeleyX/CS188.1x course project 3. **STATE:** The", "randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction) # keep track of eaten stuff", "* 3 + 2] = agent_states[i + 1].configuration.pos s[2 + i * 3", "state vector, terminal) \"\"\" if self.random_state.random_sample() < self.noise: # Random Move a =", "self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch =", "the given state. (Terminate for failure, ie eaten by ghost or out of", "return r, self._get_state(), terminal, self.possibleActions() def s0(self): \"\"\" re-initializes internal states when an", "\"o\": coord = (x, self.layout_copy.height - y) if coord in data.capsules: s[i] =", "in range(len(s.data.agentStates)): s.data._agentMoved = agent self.gameDisplay.update(s.data) s._foodEaten = None s._capsuleEaten = None #", "(x, self.layout_copy.height - y) if coord in data.capsules: s[i] = 1. i +=", "state (s)'\\ 'to showDomain(); Pacman only supports internal states.'\\ 'If you do pass", "(s)'\\ 'to showDomain(); Pacman only supports internal states.'\\ 'If you do pass a", "up in # the future return np.array([0]) # makes an array of possible", "for success, all food on map eaten.) If game should terminate, returns the", "= [ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self, fullname):", "y) if s_food[i]: data.capsules.append(coord) i += 1 elif char == \"\\n\": y +=", "project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_ For more details", "array of possible actions pacman can perform at any given # state possibleActions", "food on map eaten.) If game should terminate, returns the proper indication to", "import Domain from .PacmanPackage import layout, pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay", "= [] # adds pacman x, y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1,", "and sets the internal game state used by the original pacman package. 
\"\"\"", "agent_states[i + 1].scaredTimer # get food and capsules status i = 2 +", "the `BerkeleyX/CS188.1x course project 3 <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_ See the original `source code (zipped) <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_", "set as a parameter. Based on above, total dimensionality of state vector is", "states. \"\"\" return self.game_state.data._lose or self.game_state.data._win def _defaultSettings(self): self.ghostNum = 2 self.ghosts =", "i in range(num_ghosts): s[2 + i * 3: 2 + i * 3", "Pacman game self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout) self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents)", "* 3:] x = 0 y = 0 i = 0 data.capsules =", "consuming a capsule.) * [nf] binary variables indicating if a food is still", "vector in pacman gamestate instance and updates # the display every time pacman", "if self.random_state.random_sample() < self.noise: # Random Move a = self.random_state.choice(self.possibleActions()) a = self.actions[a]", "adds ghost x, y locations and scaredTimer (how long they can be #", "(part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and capsules locations s_food =", "# pacman.PacmanRules.applyAction(self.game_state, a) # pacman.GhostRules.checkDeath(self.game_state, 0) # the ghosts move randomly for i", "hacky, but should not matter anyway, maybe clean up in # the future", "elif char == \"o\": coord = (x, self.layout_copy.height - y) if coord in", "the BerkeleyX/CS188.1x course project 3. **STATE:** The state vector has a series of", "self.noise: # Random Move a = self.random_state.choice(self.possibleActions()) a = self.actions[a] next_state_p = self.game_state.generateSuccessor(0,", "== \"\\n\": y += 1 x = -1 x += 1 def _get_state(self):", "self.ghosts = [ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self,", "# scoring in pacman r = next_state.data.score - self.game_state.data.score self.game_state = next_state terminal", "numpy as np from copy import deepcopy import os import time __copyright__ =", "game state represented as a numpy array \"\"\" data = self.game_state.data agent_states =", "\"\"\" _max_scared_time = 39 actions = [\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num =", "(2 + 3*ng + nf + nc). **ACTIONS:** Move Pacman [up, down, left,", "pacman.GhostRules.getLegalActions(next_state, i) # TODO: use domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction =", "(s[0], s[1]) # set ghost position num_ghosts = len(agent_states) - 1 for i", "# Number of ghosts self.numGhostAgents = numGhostAgents # Intitializes Pacman game self.game_state =", "self).__init__() def _set_statespace_limits(self): # Makes an array of limits for each dimension in", "self.game_state, agentIndex=0) for a in possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self): \"\"\" Checks", "for more info. .. note:: The visualization runs as fast as your CPU", "ie eaten by ghost or out of time, and for success, all food", "s[(num_ghosts + 1) * 3:] x = 0 y = 0 i =", "= 0 y = 0 for char in str(self.layout_copy): if char == \".\":", "with scoring and terminal checking. 
Returns a tuple of form (reward, new state", "self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate instance and updates # the", "note:: The visualization runs as fast as your CPU will permit; to slow", "if it is still on the board or not *nf* and *nc* are", "Number of ghosts self.numGhostAgents = numGhostAgents # Intitializes Pacman game self.game_state = pacman.GameState()", "2 self.ghosts = [ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)] self.beQuiet = False def", "the action \"\"\" self.noise = noise # Specifies which Pacman world you want", "adds pacman x, y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2])", "+= 1 x = -1 x += 1 def _get_state(self): \"\"\" get the", "one specified by the action \"\"\" self.noise = noise # Specifies which Pacman", "`Domains/PacmanPackage` folder. \"\"\" _max_scared_time = 39 actions = [\"Stop\", \"North\", \"East\", \"South\", \"West\"]", "re-initializes internal states when an episode starts, returns a s vector \"\"\" self.game_state", "for i in range(1, num_ghosts + 1): part_s = s[(3 * i) -", "variables indicating if a food is still on the board or not *", "pacman, game, ghostAgents from .PacmanPackage import graphicsDisplay import numpy as np from copy", "a state parameter, ensure it is set to None.' raise Exception(errStr) s =", "= 0 data.capsules = [] for char in str(self.layout_copy): if char == \".\":", "data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states) - 1 s =", "Specifies which Pacman world you want self.layoutFile = layoutFile # Puts the file", "See the Berkeley project website below for more info. .. note:: The visualization", "folder. \"\"\" _max_scared_time = 39 actions = [\"Stop\", \"North\", \"East\", \"South\", \"West\"] actions_num", "+ num_ghosts * 3 x = 0 y = 0 for char in", "details of the domain see the original package in the `Domains/PacmanPackage` folder. \"\"\"", "domain.\"\"\" from rlpy.Tools import __rlpy_location__ from .Domain import Domain from .PacmanPackage import layout,", "\"<NAME>\"] __license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\" Pacman domain,", "coord in data.capsules: s[i] = 1. i += 1 x += 1 return", "np.array([0]) # makes an array of possible actions pacman can perform at any", "1. 
i += 1 x += 1 return s state = property(_get_state, _set_state)", "False def _tryToLoad(self, fullname): # used in getLayout function f = open(fullname) grid", "self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer = None", "i = 2 + num_ghosts * 3 x = 0 y = 0", "def isTerminal(self): \"\"\" Checks whether the game should terminate at the given state.", "= self._tryToLoad(self.layoutFile) self.layout = layout.Layout(layout_file_content) # Number of ghosts self.numGhostAgents = numGhostAgents #", "self._max_scared_time]) statespace_limits += [[0, 1]] * ( self.num_total_food + self.num_total_capsules) self.statespace_limits = np.array(statespace_limits,", "want self.layoutFile = layoutFile # Puts the file in line stripped format layout_file_content", "i] agent_states[i].configuration.pos = (part_s[0], part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and capsules", "= np.zeros( 2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules) # get", "__init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'), numGhostAgents=1000): \"\"\" layoutFile: filename of the map", "original pacman package. \"\"\" # copies most recent state data = self.game_state.data agent_states", "should terminate at the given state. (Terminate for failure, ie eaten by ghost", "f = open(fullname) grid = [line.strip() for line in f] f.close() return grid", "location of layouts shipped with rlpy default_layout_dir = os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\")", "not * [nc] binary variables for each capsule indicating if it is still", "= next_state.data.score - self.game_state.data.score self.game_state = next_state terminal = self.isTerminal() return r, self._get_state(),", "= False def _tryToLoad(self, fullname): # used in getLayout function f = open(fullname)", "part_s[1]) agent_states[i].scaredTimer = part_s[2] # set food and capsules locations s_food = s[(num_ghosts", "= [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in possibleMoves: possibleActions.append(self.actions.index(a)) return", "or out of time, and for success, all food on map eaten.) If", "0 y = 0 for char in str(self.layout_copy): if char == \".\": s[i]", "self.layout_copy = deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics", "supports internal states.'\\ 'If you do pass a state parameter, ensure it is", "#: location of layouts shipped with rlpy default_layout_dir = os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\",", "a in possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def isTerminal(self): \"\"\" Checks whether the game", "self.gameDisplay.drawFood( self.gameDisplay.layout.food) self.gameDisplay.capsules = self.gameDisplay.drawCapsules( self.gameDisplay.layout.capsules) # converts s vector in pacman gamestate", "def s0(self): \"\"\" re-initializes internal states when an episode starts, returns a s", "gamestate instance and updates # the display every time pacman or a ghost", "success, all food on map eaten.) 
If game should terminate, returns the proper", "as your CPU will permit; to slow things down so gameplay is actually", "def showDomain(self, a, s=None): if s is not None: errStr = 'ERROR: In", "# adds pacman x, y locations statespace_limits.append([1, self.layout.width - 2]) statespace_limits.append([1, self.layout.height -", "[ghostAgents.RandomGhost( game.Agent) for i in range(self.ghostNum)] self.beQuiet = False def _tryToLoad(self, fullname): #", "= 0 i = 0 data.capsules = [] for char in str(self.layout_copy): if", "self.game_state.data.agentStates num_ghosts = len(agent_states) - 1 s = np.zeros( 2 + num_ghosts *", "dtype=\"float\") def _set_state(self, s): \"\"\" Takes a vector s and sets the internal", "i = 0 data.capsules = [] for char in str(self.layout_copy): if char ==", "2 + num_ghosts * 3 x = 0 y = 0 for char", "= open(fullname) grid = [line.strip() for line in f] f.close() return grid class", "self.gameDisplay = None self._set_statespace_limits() super(Pacman, self).__init__() def _set_statespace_limits(self): # Makes an array of", "__license__ = \"BSD 3-Clause\" __author__ = \"<NAME>\" class Pacman(Domain): \"\"\" Pacman domain, which", "*nc* are map-dependent, and *ng* can be set as a parameter. Based on", "visualization runs as fast as your CPU will permit; to slow things down", "1]] * ( self.num_total_food + self.num_total_capsules) self.statespace_limits = np.array(statespace_limits, dtype=\"float\") def _set_state(self, s):", "still on the board or not * [nc] binary variables for each capsule", "i * 3 + 2] = agent_states[i + 1].configuration.pos s[2 + i *", "s = np.zeros( 2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules) #", "Checks whether the game should terminate at the given state. (Terminate for failure,", "self.layout.width - 2]) statespace_limits.append([1, self.layout.height - 2]) statespace_limits.append([0, self._max_scared_time]) statespace_limits += [[0, 1]]", "is set to None.' raise Exception(errStr) s = self.game_state if self.gameDisplay is None:", "makes an array of possible actions pacman can perform at any given #", "anyway, maybe clean up in # the future return np.array([0]) # makes an", "set ghost position num_ghosts = len(agent_states) - 1 for i in range(1, num_ghosts", "can perform at any given # state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions(", "pacman gamestate instance and updates # the display every time pacman or a", "can be set as a parameter. Based on above, total dimensionality of state", "state used by the original pacman package. 
\"\"\" # copies most recent state", "perform at any given # state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state,", "used in getLayout function f = open(fullname) grid = [line.strip() for line in", "char == \".\": s[i] = data.food[x][y] i += 1 elif char == \"\\n\":", "possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in possibleMoves: possibleActions.append(self.actions.index(a)) return np.array(possibleActions) def", "pass a state (s)'\\ 'to showDomain(); Pacman only supports internal states.'\\ 'If you", "# graphics are updated after every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten =", "# state possibleActions = [] possibleMoves = pacman.GameState.getLegalActions( self.game_state, agentIndex=0) for a in", "= os.path.join( __rlpy_location__, \"Domains\", \"PacmanPackage\", \"layouts\") def __init__(self, noise=.1, timeout=30, layoutFile=os.path.join( default_layout_dir, 'trickyClassic.lay'),", "x and y coordinates as well as the scare time of each ghost", "1): part_s = s[(3 * i) - 1:3 * i] agent_states[i].configuration.pos = (part_s[0],", "# get pacman position s[:2] = agent_states[0].configuration.pos # import ipdb; ipdb.set_trace() # get", "= [line.strip() for line in f] f.close() return grid class DummyGraphics(object): def initialize(self,", "domain random stream randomAction_ind = self.random_state.randint(len(ghostOptions)) randomAction = ghostOptions[randomAction_ind] next_state = next_state.generateSuccessor(i, randomAction)", "state vector has a series of dimensions: * [2] The x and y", "= self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states) - 1 s = np.zeros(", "data = self.game_state.data agent_states = data.agentStates # set pacman position agent_states.configuration.pos = (s[0],", "converts s vector in pacman gamestate instance and updates # the display every", "self.possibleActions() def s0(self): \"\"\" re-initializes internal states when an episode starts, returns a", "= deepcopy(self.layout) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self.num_total_food = len(self.layout_copy.food.asList()) self.num_total_capsules = len(self.layout_copy.capsules) self._defaultSettings() self.restartGraphics =", "state. (Terminate for failure, ie eaten by ghost or out of time, and", "property(_get_state, _set_state) def showDomain(self, a, s=None): if s is not None: errStr =", "3*ng + nf + nc). 
**ACTIONS:** Move Pacman [up, down, left, right, stay]", "i * 3: 2 + i * 3 + 2] = agent_states[i +", "\"a\" in current state object # pacman.PacmanRules.applyAction(self.game_state, a) # pacman.GhostRules.checkDeath(self.game_state, 0) # the", "get food and capsules status i = 2 + num_ghosts * 3 x", "actions_num = 5 episodeCap = 1000 #: location of layouts shipped with rlpy", "s_food = s[(num_ghosts + 1) * 3:] x = 0 y = 0", "def initialize(self, *arg, **kwargs): pass def update(self, *arg, **kwargs): pass def finalize(self, *arg,", "[2] The x and y coordinates of pacman * [3 * ng] the", "= s[(num_ghosts + 1) * 3:] x = 0 y = 0 i", "catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return self.state, self.isTerminal(), self.possibleActions() def possibleActions(self): if", "self._defaultSettings() self.restartGraphics = None self.timerswitch = False self.savedtimer = None self.gameDisplay = None", "game state used by the original pacman package. \"\"\" # copies most recent", "= None self.timerswitch = False self.savedtimer = None self.gameDisplay = None self._set_statespace_limits() super(Pacman,", "s is not None: errStr = 'ERROR: In Pacman.py, attempted to pass a", "is None: self.gameDisplay = graphicsDisplay.PacmanGraphics() self.gameDisplay.startGraphics(self) self.gameDisplay.drawStaticObjects(s.data) self.gameDisplay.drawAgentObjects(s.data) elif self._cleanup_graphics: self._cleanup_graphics = False", "# s.data.food is the correct food matrix s.data.layout.food = s.data.food for agent in", "pacman r = next_state.data.score - self.game_state.data.score self.game_state = next_state terminal = self.isTerminal() return", "+ i * 3 + 2] = agent_states[i + 1].configuration.pos s[2 + i", "an array of limits for each dimension in the state vector. statespace_limits =", "a) next_state = next_state_p # pacman performs action \"a\" in current state object", "None.' raise Exception(errStr) s = self.game_state if self.gameDisplay is None: self.gameDisplay = graphicsDisplay.PacmanGraphics()", "terminate at the given state. (Terminate for failure, ie eaten by ghost or", "pacman can perform at any given # state possibleActions = [] possibleMoves =", "+ 1): part_s = s[(3 * i) - 1:3 * i] agent_states[i].configuration.pos =", "= \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\" __credits__ = [\"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\", \"<NAME>\"] __license__", "they can be # eaten) for ghost in self.game_state.data.agentStates[1:]: statespace_limits.append([1, self.layout.width - 2])", "noise: with this probability pacman makes a random move instead the one specified", "a s vector \"\"\" self.game_state = pacman.GameState() self.game_rules = pacman.ClassicGameRules(timeout=30) self.layout_copy = deepcopy(self.layout)", "state = property(_get_state, _set_state) def showDomain(self, a, s=None): if s is not None:", "agent_states[i + 1].configuration.pos s[2 + i * 3 + 2] = agent_states[i +", "import graphicsDisplay import numpy as np from copy import deepcopy import os import", "proper indication to step function. Accounts for scoring changes in terminal states. 
\"\"\"", "domain, which acts as a wrapper for the Pacman implementation from the BerkeleyX/CS188.1x", "not None: errStr = 'ERROR: In Pacman.py, attempted to pass a state (s)'\\", "a tuple of form (reward, new state vector, terminal) \"\"\" if self.random_state.random_sample() <", "* 3 + self.num_total_food + self.num_total_capsules) # get pacman position s[:2] = agent_states[0].configuration.pos", "numpy array \"\"\" data = self.game_state.data agent_states = self.game_state.data.agentStates num_ghosts = len(agent_states) -", "next_state terminal = self.isTerminal() return r, self._get_state(), terminal, self.possibleActions() def s0(self): \"\"\" re-initializes", "# time.sleep(0.1) # Sleep for 0.1 sec def step(self, a): \"\"\" Applies actions", "i * 3 + 2] = agent_states[i + 1].scaredTimer # get food and", "'ERROR: In Pacman.py, attempted to pass a state (s)'\\ 'to showDomain(); Pacman only", "down, left, right, stay] **REWARD:** See the Berkeley project website below for more", "checking. Returns a tuple of form (reward, new state vector, terminal) \"\"\" if", "changes in terminal states. \"\"\" return self.game_state.data._lose or self.game_state.data._win def _defaultSettings(self): self.ghostNum =", "- 1 s = np.zeros( 2 + num_ghosts * 3 + self.num_total_food +", "makes a random move instead the one specified by the action \"\"\" self.noise", "acts as a wrapper for the Pacman implementation from the BerkeleyX/CS188.1x course project", "runs as fast as your CPU will permit; to slow things down so", "= data.food[x][y] i += 1 elif char == \"\\n\": y += 1 x", "the Pacman implementation from the BerkeleyX/CS188.1x course project 3. **STATE:** The state vector", "self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True return self.state, self.isTerminal(), self.possibleActions()", "every agent's move) next_state.data._foodEaten = next_state_p.data._foodEaten next_state.data._capsuleEaten = next_state_p.data._capsuleEaten # scoring in pacman", "char == \"o\": coord = (x, self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord) i", "elif char == \"\\n\": y += 1 x = -1 x += 1", "y += 1 x = -1 x += 1 def _get_state(self): \"\"\" get", "copy import deepcopy import os import time __copyright__ = \"Copyright 2013, RLPy http://acl.mit.edu/RLPy\"", "= self.game_state.generateSuccessor(0, a) next_state = next_state_p # pacman performs action \"a\" in current", "\"o\": coord = (x, self.layout_copy.height - y) if s_food[i]: data.capsules.append(coord) i += 1", "1 x += 1 return s state = property(_get_state, _set_state) def showDomain(self, a,", "data.food[x][y] i += 1 elif char == \"\\n\": y += 1 x =", "rlpy.Tools import __rlpy_location__ from .Domain import Domain from .PacmanPackage import layout, pacman, game,", "ipdb.set_trace() # get ghost info for i in range(num_ghosts): s[2 + i *", "= self.game_rules.newGame( self.layout_copy, pacman, self.ghosts, DummyGraphics(), self.beQuiet, catchExceptions=False) self.game_state.data.initialize(self.layout_copy, self.numGhostAgents) self._cleanup_graphics = True", "in the showDomain() method. **REFERENCE:** This domain is an RLPy wrapper for the", "to the given state. 
"""Pacman game domain."""
from rlpy.Tools import __rlpy_location__
from .Domain import Domain
from .PacmanPackage import layout, pacman, game, ghostAgents
from .PacmanPackage import graphicsDisplay
import numpy as np
from copy import deepcopy
import os
import time

__copyright__ = "Copyright 2013, RLPy http://acl.mit.edu/RLPy"
__credits__ = ["<NAME>", "<NAME>", "<NAME>", "<NAME>", "<NAME>"]
__license__ = "BSD 3-Clause"
__author__ = "<NAME>"


class Pacman(Domain):

    """
    Pacman domain, which acts as a wrapper for the Pacman implementation
    from the BerkeleyX/CS188.1x course project 3.

    **STATE:** The state vector has a series of dimensions:

    * [2] The x and y coordinates of pacman
    * [3 * ng] the x and y coordinates as well as the scare time of each
      ghost ("scare time" is how long the ghost remains scared after
      consuming a capsule)
    * [nf] binary variables indicating if a food is still on the board or not
    * [nc] binary variables for each capsule indicating if it is still on
      the board or not

    *nf* and *nc* are map-dependent, and *ng* can be set as a parameter.
    Based on the above, the total dimensionality of the state vector is
    map-dependent and given by (2 + 3*ng + nf + nc).

    **ACTIONS:** Move Pacman [up, down, left, right, stay]

    **REWARD:** See the Berkeley project website below for more info.

    .. note::
        The visualization runs as fast as your CPU will permit; to slow
        things down so gameplay is actually visible, uncomment time.sleep()
        in the showDomain() method.

    **REFERENCE:** This domain is an RLPy wrapper for the implementation
    from the `BerkeleyX/CS188.1x course project 3
    <https://courses.edx.org/courses/BerkeleyX/CS188.1x/2013_Spring/courseware/Week_9/Project_3_Reinforcement/>`_

    See the original `source code (zipped)
    <https://courses.edx.org/static/content-berkeley-cs188x~2013_Spring/projects/reinforcement/reinforcement.zip>`_

    For more details of the domain see the original package in the
    `Domains/PacmanPackage` folder.
    """

    _max_scared_time = 39
    actions = ["Stop", "North", "East", "South", "West"]
    actions_num = 5
    episodeCap = 1000
    #: location of layouts shipped with rlpy
    default_layout_dir = os.path.join(
        __rlpy_location__, "Domains", "PacmanPackage", "layouts")

    def __init__(self, noise=.1, timeout=30,
                 layoutFile=os.path.join(
                     default_layout_dir, 'trickyClassic.lay'),
                 numGhostAgents=1000):
        """
        layoutFile:
            filename of the map file
        noise:
            with this probability pacman makes a random move instead of the
            one specified by the action
        """
        self.noise = noise
        # Specifies which Pacman world you want
        self.layoutFile = layoutFile
        # Puts the file in line-stripped format
        layout_file_content = self._tryToLoad(self.layoutFile)
        self.layout = layout.Layout(layout_file_content)
        # Number of ghosts
        self.numGhostAgents = numGhostAgents
        # Initializes the Pacman game
        self.game_state = pacman.GameState()
        self.game_rules = pacman.ClassicGameRules(timeout)
        self.layout_copy = deepcopy(self.layout)
        self.game_state.data.initialize(self.layout_copy, self.numGhostAgents)
        self.num_total_food = len(self.layout_copy.food.asList())
        self.num_total_capsules = len(self.layout_copy.capsules)
        self._defaultSettings()
        self.restartGraphics = None
        self.timerswitch = False
        self.savedtimer = None
        self.gameDisplay = None
        self._set_statespace_limits()
        super(Pacman, self).__init__()

    def _set_statespace_limits(self):
        # Makes an array of limits for each dimension in the state vector.
        statespace_limits = []
        # adds pacman x, y locations
        statespace_limits.append([1, self.layout.width - 2])
        statespace_limits.append([1, self.layout.height - 2])
        # adds ghost x, y locations and scaredTimer (how long they can be
        # eaten)
        for ghost in self.game_state.data.agentStates[1:]:
            statespace_limits.append([1, self.layout.width - 2])
            statespace_limits.append([1, self.layout.height - 2])
            statespace_limits.append([0, self._max_scared_time])
        statespace_limits += [[0, 1]] * (
            self.num_total_food + self.num_total_capsules)
        self.statespace_limits = np.array(statespace_limits, dtype="float")

    def _set_state(self, s):
        """
        Takes a vector s and sets the internal game state used by the
        original Pacman package.
        """
        # copies most recent state
        data = self.game_state.data
        agent_states = data.agentStates
        # set pacman position
        agent_states[0].configuration.pos = (s[0], s[1])
        # set ghost positions
        num_ghosts = len(agent_states) - 1
        for i in range(1, num_ghosts + 1):
            part_s = s[(3 * i) - 1:3 * i + 2]
            agent_states[i].configuration.pos = (part_s[0], part_s[1])
            agent_states[i].scaredTimer = part_s[2]
        # set food and capsules locations
        s_food = s[(num_ghosts + 1) * 3:]
        x = 0
        y = 0
        i = 0
        data.capsules = []
        for char in str(self.layout_copy):
            if char == ".":
                data.food[x][y] = bool(s_food[i])
                i += 1
            elif char == "o":
                coord = (x, self.layout_copy.height - y)
                if s_food[i]:
                    data.capsules.append(coord)
                i += 1
            elif char == "\n":
                y += 1
                x = -1
            x += 1

    def _get_state(self):
        """
        Get the internal game state represented as a numpy array.
        """
        data = self.game_state.data
        agent_states = self.game_state.data.agentStates
        num_ghosts = len(agent_states) - 1
        s = np.zeros(
            2 + num_ghosts * 3 + self.num_total_food + self.num_total_capsules)
        # get pacman position
        s[:2] = agent_states[0].configuration.pos
        # get ghost info
        for i in range(num_ghosts):
            s[2 + i * 3: 2 + i * 3 + 2] = agent_states[i + 1].configuration.pos
            s[2 + i * 3 + 2] = agent_states[i + 1].scaredTimer
        # get food and capsules status
        i = 2 + num_ghosts * 3
        x = 0
        y = 0
        for char in str(self.layout_copy):
            if char == ".":
                s[i] = data.food[x][y]
                i += 1
            elif char == "\n":
                y += 1
                x = -1
            elif char == "o":
                coord = (x, self.layout_copy.height - y)
                if coord in data.capsules:
                    s[i] = 1.
                i += 1
            x += 1
        return s

    state = property(_get_state, _set_state)

    def showDomain(self, a, s=None):
        if s is not None:
            errStr = 'ERROR: In Pacman.py, attempted to pass a state (s) '\
                'to showDomain(); Pacman only supports internal states. '\
                'If you do pass a state parameter, ensure it is set to None.'
            raise Exception(errStr)
        s = self.game_state
        if self.gameDisplay is None:
            self.gameDisplay = graphicsDisplay.PacmanGraphics()
            self.gameDisplay.startGraphics(self)
            self.gameDisplay.drawStaticObjects(s.data)
            self.gameDisplay.drawAgentObjects(s.data)
        elif self._cleanup_graphics:
            self._cleanup_graphics = False
            self.gameDisplay.removeAllFood()
            self.gameDisplay.removeAllCapsules()
            self.gameDisplay.food = self.gameDisplay.drawFood(
                self.gameDisplay.layout.food)
            self.gameDisplay.capsules = self.gameDisplay.drawCapsules(
                self.gameDisplay.layout.capsules)
        # converts the s vector into a pacman gamestate instance and updates
        # the display every time pacman or a ghost moves.
        # s.data.food is the correct food matrix
        s.data.layout.food = s.data.food
        for agent in range(len(s.data.agentStates)):
            s.data._agentMoved = agent
            self.gameDisplay.update(s.data)
            s._foodEaten = None
            s._capsuleEaten = None
            # time.sleep(0.1)  # Sleep for 0.1 sec

    def step(self, a):
        """
        Applies actions from outside the Pacman domain to the given state.
        Internal states are accounted for along with scoring and terminal
        checking.
        Returns a tuple of the form (reward, new state vector, terminal,
        possible actions).
        """
        if self.random_state.random_sample() < self.noise:
            # Random Move
            a = self.random_state.choice(self.possibleActions())
        a = self.actions[a]
        next_state_p = self.game_state.generateSuccessor(0, a)
        next_state = next_state_p
        # pacman performs action "a" in the current state object
        # pacman.PacmanRules.applyAction(self.game_state, a)
        # pacman.GhostRules.checkDeath(self.game_state, 0)
        # the ghosts move randomly
        for i in range(1, len(self.game_state.data.agentStates)):
            if next_state.isWin() or next_state.isLose():
                break
            ghostOptions = pacman.GhostRules.getLegalActions(next_state, i)
            # TODO: use domain random stream
            randomAction_ind = self.random_state.randint(len(ghostOptions))
            randomAction = ghostOptions[randomAction_ind]
            next_state = next_state.generateSuccessor(i, randomAction)
        # keep track of eaten stuff for graphics (original code assumes
        # graphics are updated after every agent's move)
        next_state.data._foodEaten = next_state_p.data._foodEaten
        next_state.data._capsuleEaten = next_state_p.data._capsuleEaten
        # scoring in pacman
        r = next_state.data.score - self.game_state.data.score
        self.game_state = next_state
        terminal = self.isTerminal()
        return r, self._get_state(), terminal, self.possibleActions()

    def s0(self):
        """
        Re-initializes the internal state when an episode starts and returns
        an s vector.
        """
        self.game_state = pacman.GameState()
        self.game_rules = pacman.ClassicGameRules(timeout=30)
        self.layout_copy = deepcopy(self.layout)
        self.game = self.game_rules.newGame(
            self.layout_copy, pacman, self.ghosts, DummyGraphics(),
            self.beQuiet, catchExceptions=False)
        self.game_state.data.initialize(self.layout_copy, self.numGhostAgents)
        self._cleanup_graphics = True
        return self.state, self.isTerminal(), self.possibleActions()

    def possibleActions(self):
        if self.isTerminal():
            # somewhat hacky, but should not matter anyway; maybe clean up in
            # the future
            return np.array([0])
        # makes an array of possible actions pacman can perform at any given
        # state
        possibleActions = []
        possibleMoves = pacman.GameState.getLegalActions(
            self.game_state, agentIndex=0)
        for a in possibleMoves:
            possibleActions.append(self.actions.index(a))
        return np.array(possibleActions)

    def isTerminal(self):
        """
        Checks whether the game should terminate at the given state.
        (Terminate for failure, i.e. eaten by a ghost or out of time, and for
        success, all food on the map eaten.)
        If the game should terminate, returns the proper indication to the
        step function.
        Accounts for scoring changes in terminal states.
        """
        return self.game_state.data._lose or self.game_state.data._win

    def _defaultSettings(self):
        self.ghostNum = 2
        self.ghosts = [ghostAgents.RandomGhost(
            game.Agent) for i in range(self.ghostNum)]
        self.beQuiet = False

    def _tryToLoad(self, fullname):
        # used in the getLayout function
        f = open(fullname)
        grid = [line.strip() for line in f]
        f.close()
        return grid


class DummyGraphics(object):

    def initialize(self, *arg, **kwargs):
        pass

    def update(self, *arg, **kwargs):
        pass

    def finalize(self, *arg, **kwargs):
        pass
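The domain above is driven through its s0()/step() interface like any other rlpy domain. The following is a minimal sketch of a random-policy rollout; it assumes rlpy is installed and exposes the class above as rlpy.Domains.Pacman, and the helper name run_random_episode and the step limit are illustrative, not part of the original module.

# Random-rollout sketch (assumptions: rlpy is importable and exposes the
# Pacman domain above as rlpy.Domains.Pacman; helper name and step limit
# are illustrative only).
import numpy as np
from rlpy.Domains import Pacman


def run_random_episode(max_steps=200, seed=0):
    domain = Pacman(noise=0.1)
    rng = np.random.RandomState(seed)
    # state layout: 2 (pacman x, y) + 3 per ghost + one bit per food/capsule
    print("state dimension:", domain.statespace_limits.shape[0])
    s, terminal, actions = domain.s0()
    total_reward = 0.0
    for _ in range(max_steps):
        if terminal:
            break
        a = rng.choice(actions)              # index into Pacman.actions
        r, s, terminal, actions = domain.step(a)
        total_reward += r
    return total_reward


if __name__ == "__main__":
    print("return of one random episode:", run_random_episode())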
[ "'online/$year/$volume/foo' If the respective collection does not exist, it will be created before", "working directory. template: Template which will be filled with year and volume. In", "def get_working_directory(template): \"\"\"Return the collection which is the main working directory. template: Template", "max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection", "with year and volume. In ``template`` the placeholders $year and $volume will be", "In ``template`` the placeholders $year and $volume will be replaced. Example: 'online/$year/$volume/foo' If", "respective collection does not exist, it will be created before returning it. \"\"\"", "the collection which is the main working directory. template: Template which will be", "import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default", "as _ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year =", "main working directory. template: Template which will be filled with year and volume.", "is the main working directory. template: Template which will be filled with year", "Template which will be filled with year and volume. In ``template`` the placeholders", "$year and $volume will be replaced. Example: 'online/$year/$volume/foo' If the respective collection does", "and $volume will be replaced. Example: 'online/$year/$volume/foo' If the respective collection does not", "$volume will be replaced. Example: 'online/$year/$volume/foo' If the respective collection does not exist,", "be replaced. Example: 'online/$year/$volume/foo' If the respective collection does not exist, it will", "zeit.cms.i18n import MessageFactory as _ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS", "zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100)", "CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default", "volume. In ``template`` the placeholders $year and $volume will be replaced. 
Example: 'online/$year/$volume/foo'", "zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def", "default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which", "volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which is the main working", "from zeit.cms.i18n import MessageFactory as _ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global", "= zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54)", "the respective collection does not exist, it will be created before returning it.", "IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume =", "settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"),", "title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template):", "the main working directory. template: Template which will be filled with year and", "_ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int(", "placeholders $year and $volume will be replaced. Example: 'online/$year/$volume/foo' If the respective collection", "collection which is the main working directory. template: Template which will be filled", "will be filled with year and volume. In ``template`` the placeholders $year and", "filled with year and volume. In ``template`` the placeholders $year and $volume will", "``template`` the placeholders $year and $volume will be replaced. Example: 'online/$year/$volume/foo' If the", "the placeholders $year and $volume will be replaced. Example: 'online/$year/$volume/foo' If the respective", "default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1,", "which is the main working directory. template: Template which will be filled with", "If the respective collection does not exist, it will be created before returning", "zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"),", "= zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which is", "will be replaced. Example: 'online/$year/$volume/foo' If the respective collection does not exist, it", "get_working_directory(template): \"\"\"Return the collection which is the main working directory. template: Template which", "MessageFactory as _ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year", "year and volume. In ``template`` the placeholders $year and $volume will be replaced.", "template: Template which will be filled with year and volume. In ``template`` the", "replaced. 
Example: 'online/$year/$volume/foo' If the respective collection does not exist, it will be", "class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume", "\"\"\"Return the collection which is the main working directory. template: Template which will", "which will be filled with year and volume. In ``template`` the placeholders $year", "year\"), min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return", "zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which is the", "be filled with year and volume. In ``template`` the placeholders $year and $volume", "\"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900, max=2100) default_volume = zope.schema.Int(", "directory. template: Template which will be filled with year and volume. In ``template``", "Example: 'online/$year/$volume/foo' If the respective collection does not exist, it will be created", "min=1900, max=2100) default_volume = zope.schema.Int( title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the", "import MessageFactory as _ import zope.interface import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\"", "max=54) def get_working_directory(template): \"\"\"Return the collection which is the main working directory. template:", "title=_(\"Default volume\"), min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which is the main", "and volume. In ``template`` the placeholders $year and $volume will be replaced. Example:", "import zope.schema class IGlobalSettings(zope.interface.Interface): \"\"\"Global CMS settings.\"\"\" default_year = zope.schema.Int( title=_(\"Default year\"), min=1900,", "min=1, max=54) def get_working_directory(template): \"\"\"Return the collection which is the main working directory." ]
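Only the docstring of get_working_directory is part of this fragment; its body is not. A minimal sketch of the placeholder substitution the docstring describes, using string.Template with assumed sample values (the helper name fill_template and the inputs are illustrative, not from the source):

# Minimal sketch, assuming string.Template-style substitution of the $year and
# $volume placeholders described above. Function name and values are hypothetical.
from string import Template


def fill_template(template, year, volume):
    return Template(template).substitute(year=year, volume=volume)


print(fill_template('online/$year/$volume/foo', 2024, 7))  # online/2024/7/foo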
[ "- j) else: j = (i - 2) // 2 print(M + 2", "1): if i % 2 == 1: j = (i - 1) //", "if i % 2 == 1: j = (i - 1) // 2", "for i in range(1, M + 1): if i % 2 == 1:", "i % 2 == 1: j = (i - 1) // 2 print(1", "1) // 2 print(1 + j, M + 1 - j) else: j", "// 2 print(1 + j, M + 1 - j) else: j =", "M + 1 - j) else: j = (i - 2) // 2", "else: j = (i - 2) // 2 print(M + 2 + j,", "+ j, M + 1 - j) else: j = (i - 2)", "= (i - 2) // 2 print(M + 2 + j, 2 *", "== 1: j = (i - 1) // 2 print(1 + j, M", "(i - 1) // 2 print(1 + j, M + 1 - j)", "map(int, input().split()) for i in range(1, M + 1): if i % 2", "input().split()) for i in range(1, M + 1): if i % 2 ==", "- 2) // 2 print(M + 2 + j, 2 * M +", "<filename>abc/abc165/abc165e.py N, M = map(int, input().split()) for i in range(1, M + 1):", "j) else: j = (i - 2) // 2 print(M + 2 +", "= map(int, input().split()) for i in range(1, M + 1): if i %", "2 print(1 + j, M + 1 - j) else: j = (i", "+ 1 - j) else: j = (i - 2) // 2 print(M", "j = (i - 2) // 2 print(M + 2 + j, 2", "j = (i - 1) // 2 print(1 + j, M + 1", "print(1 + j, M + 1 - j) else: j = (i -", "j, M + 1 - j) else: j = (i - 2) //", "2 print(M + 2 + j, 2 * M + 1 - j)", "2 == 1: j = (i - 1) // 2 print(1 + j,", "M = map(int, input().split()) for i in range(1, M + 1): if i", "M + 1): if i % 2 == 1: j = (i -", "% 2 == 1: j = (i - 1) // 2 print(1 +", "= (i - 1) // 2 print(1 + j, M + 1 -", "// 2 print(M + 2 + j, 2 * M + 1 -", "1: j = (i - 1) // 2 print(1 + j, M +", "(i - 2) // 2 print(M + 2 + j, 2 * M", "range(1, M + 1): if i % 2 == 1: j = (i", "N, M = map(int, input().split()) for i in range(1, M + 1): if", "- 1) // 2 print(1 + j, M + 1 - j) else:", "in range(1, M + 1): if i % 2 == 1: j =", "+ 1): if i % 2 == 1: j = (i - 1)", "i in range(1, M + 1): if i % 2 == 1: j", "1 - j) else: j = (i - 2) // 2 print(M +", "2) // 2 print(M + 2 + j, 2 * M + 1" ]
[ "License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS", "writing, software # distributed under the License is distributed on an \"AS IS\"", "return [name.strip() for name in fd.readlines()] with open('README.rst', 'r') as fd: long_description =", "Unless required by applicable law or agreed to in writing, software # distributed", "line in fd: m = reg.match(line) if m: __version__ = m.group(1) break def", "See the # License for the specific language governing permissions and limitations #", "for line in fd: m = reg.match(line) if m: __version__ = m.group(1) break", ":: OSI Approved :: Apache Software License\", \"Intended Audience :: Developers\", \"Programming Language", "setup from setuptools.command.test import test as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r')", "def _requirements(): with open('requirements.txt', 'r') as fd: return [name.strip() for name in fd.readlines()]", "\"License\"); you may # not use this file except in compliance with the", "= m.group(1) break def _requirements(): with open('requirements.txt', 'r') as fd: return [name.strip() for", "Apache License, Version 2.0 (the \"License\"); you may # not use this file", "\"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved :: Apache Software", "the License. You may obtain # a copy of the License at #", "for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development", "law or agreed to in writing, software # distributed under the License is", "License. import re import sys from setuptools import setup from setuptools.command.test import test", "may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "the Apache License, Version 2.0 (the \"License\"); you may # not use this", "author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache License", "setuptools.command.test import test as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as fd:", "5 - Production/Stable\", \"License :: OSI Approved :: Apache Software License\", \"Intended Audience", "python # -*- coding: utf-8 -*- # Licensed under the Apache License, Version", "import re import sys from setuptools import setup from setuptools.command.test import test as", "description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\"", "express or implied. See the # License for the specific language governing permissions", "in fd.readlines()] with open('README.rst', 'r') as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__,", "m: __version__ = m.group(1) break def _requirements(): with open('requirements.txt', 'r') as fd: return", "TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ =", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "CONDITIONS OF ANY KIND, either express or implied. See the # License for", "not use this file except in compliance with the License. 
You may obtain", "install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved ::", "Production/Stable\", \"License :: OSI Approved :: Apache Software License\", \"Intended Audience :: Developers\",", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "with the License. You may obtain # a copy of the License at", "for the specific language governing permissions and limitations # under the License. import", "= fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API", "Licensed under the Apache License, Version 2.0 (the \"License\"); you may # not", "license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5", "License for the specific language governing permissions and limitations # under the License.", "the License. import re import sys from setuptools import setup from setuptools.command.test import", "and limitations # under the License. import re import sys from setuptools import", "with open('requirements.txt', 'r') as fd: return [name.strip() for name in fd.readlines()] with open('README.rst',", "Approved :: Apache Software License\", \"Intended Audience :: Developers\", \"Programming Language :: Python", "2.0 (the \"License\"); you may # not use this file except in compliance", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "License\", \"Intended Audience :: Developers\", \"Programming Language :: Python :: 3\", \"Topic ::", "_requirements(): with open('requirements.txt', 'r') as fd: return [name.strip() for name in fd.readlines()] with", "fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK", "m = reg.match(line) if m: __version__ = m.group(1) break def _requirements(): with open('requirements.txt',", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "-*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the", "use this file except in compliance with the License. You may obtain #", "# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT", "# under the License. import re import sys from setuptools import setup from", "open('requirements.txt', 'r') as fd: return [name.strip() for name in fd.readlines()] with open('README.rst', 'r')", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the #", "compliance with the License. You may obtain # a copy of the License", "the specific language governing permissions and limitations # under the License. import re", "__version__ = m.group(1) break def _requirements(): with open('requirements.txt', 'r') as fd: return [name.strip()", "for name in fd.readlines()] with open('README.rst', 'r') as fd: long_description = fd.read() setup(", "License, Version 2.0 (the \"License\"); you may # not use this file except", "language governing permissions and limitations # under the License. 
import re import sys", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF", "= re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line) if m:", "fd.readlines()] with open('README.rst', 'r') as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\",", "SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "implied. See the # License for the specific language governing permissions and limitations", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "OF ANY KIND, either express or implied. See the # License for the", "'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m", "OSI Approved :: Apache Software License\", \"Intended Audience :: Developers\", \"Programming Language ::", "= reg.match(line) if m: __version__ = m.group(1) break def _requirements(): with open('requirements.txt', 'r')", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", ":: Apache Software License\", \"Intended Audience :: Developers\", \"Programming Language :: Python ::", "import sys from setuptools import setup from setuptools.command.test import test as TestCommand __version__", "'' with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line", "long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status ::", "[\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line) if m: __version__ = m.group(1)", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the", "maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache License 2.0',", "name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\",", "you may # not use this file except in compliance with the License.", "re import sys from setuptools import setup from setuptools.command.test import test as TestCommand", "with open('README.rst', 'r') as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\",", "Status :: 5 - Production/Stable\", \"License :: OSI Approved :: Apache Software License\",", "Audience :: Developers\", \"Programming Language :: Python :: 3\", \"Topic :: Software Development\"", "agreed to in writing, software # distributed under the License is distributed on", "governing permissions and limitations # under the License. 
import re import sys from", "fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook", "specific language governing permissions and limitations # under the License. import re import", "(the \"License\"); you may # not use this file except in compliance with", "setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for", "classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved :: Apache", "'r') as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\",", "KIND, either express or implied. See the # License for the specific language", "may # not use this file except in compliance with the License. You", "import setup from setuptools.command.test import test as TestCommand __version__ = '' with open('facebookbot/__about__.py',", "either express or implied. See the # License for the specific language governing", "[name.strip() for name in fd.readlines()] with open('README.rst', 'r') as fd: long_description = fd.read()", "Developers\", \"Programming Language :: Python :: 3\", \"Topic :: Software Development\" ] )", "# # Unless required by applicable law or agreed to in writing, software", "import test as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as fd: reg", "file except in compliance with the License. You may obtain # a copy", "open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd:", "this file except in compliance with the License. You may obtain # a", "author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache", "# Unless required by applicable law or agreed to in writing, software #", "# -*- coding: utf-8 -*- # Licensed under the Apache License, Version 2.0", "by applicable law or agreed to in writing, software # distributed under the", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line) if m: __version__", "reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line) if", "as fd: return [name.strip() for name in fd.readlines()] with open('README.rst', 'r') as fd:", "under the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "or implied. 
See the # License for the specific language governing permissions and", "from setuptools import setup from setuptools.command.test import test as TestCommand __version__ = ''", "reg.match(line) if m: __version__ = m.group(1) break def _requirements(): with open('requirements.txt', 'r') as", "if m: __version__ = m.group(1) break def _requirements(): with open('requirements.txt', 'r') as fd:", "long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging", "as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m =", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "break def _requirements(): with open('requirements.txt', 'r') as fd: return [name.strip() for name in", "#!/usr/bin/env python # -*- coding: utf-8 -*- # Licensed under the Apache License,", "__version__ = '' with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]')", "License. You may obtain # a copy of the License at # #", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", ":: 5 - Production/Stable\", \"License :: OSI Approved :: Apache Software License\", \"Intended", "= [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line) if m: __version__ =", "License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 -", "\"Intended Audience :: Developers\", \"Programming Language :: Python :: 3\", \"Topic :: Software", "packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "fd: m = reg.match(line) if m: __version__ = m.group(1) break def _requirements(): with", "fd: return [name.strip() for name in fd.readlines()] with open('README.rst', 'r') as fd: long_description", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\",", "ANY KIND, either express or implied. See the # License for the specific", "the # License for the specific language governing permissions and limitations # under", "except in compliance with the License. 
You may obtain # a copy of", "utf-8 -*- # Licensed under the Apache License, Version 2.0 (the \"License\"); you", "], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "- Production/Stable\", \"License :: OSI Approved :: Apache Software License\", \"Intended Audience ::", "Apache Software License\", \"Intended Audience :: Developers\", \"Programming Language :: Python :: 3\",", "fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in fd: m = reg.match(line)", "m.group(1) break def _requirements(): with open('requirements.txt', 'r') as fd: return [name.strip() for name", "to in writing, software # distributed under the License is distributed on an", "Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status", "You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "Software License\", \"Intended Audience :: Developers\", \"Programming Language :: Python :: 3\", \"Topic", "sys from setuptools import setup from setuptools.command.test import test as TestCommand __version__ =", "setuptools import setup from setuptools.command.test import test as TestCommand __version__ = '' with", "permissions and limitations # under the License. import re import sys from setuptools", "\"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License :: OSI", "required by applicable law or agreed to in writing, software # distributed under", "limitations # under the License. import re import sys from setuptools import setup", "as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\",", "applicable law or agreed to in writing, software # distributed under the License", "url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\",", "-*- # Licensed under the Apache License, Version 2.0 (the \"License\"); you may", "maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[", "distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT #", "= '' with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for", "OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License", "with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__ = [\\'\"]([^\\'\"]*)[\\'\"]') for line in", "obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "open('README.rst', 'r') as fd: long_description = fd.read() setup( name=\"fbsdk\", version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\",", "test as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as fd: reg =", "version=__version__, author=\"<NAME>\", author_email=\"<EMAIL>\", maintainer=\"<NAME>\", maintainer_email=\"<EMAIL>\", url=\"https://github.com/boompieman/fbsdk\", description=\"Facebook Messaging API SDK for Python\", long_description=long_description,", "as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as fd: reg = re.compile(r'__version__", "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may #", "in compliance with the License. You may obtain # a copy of the", "# not use this file except in compliance with the License. You may", "name in fd.readlines()] with open('README.rst', 'r') as fd: long_description = fd.read() setup( name=\"fbsdk\",", "or agreed to in writing, software # distributed under the License is distributed", "'r') as fd: return [name.strip() for name in fd.readlines()] with open('README.rst', 'r') as", ":: Developers\", \"Programming Language :: Python :: 3\", \"Topic :: Software Development\" ]", "# License for the specific language governing permissions and limitations # under the", "in fd: m = reg.match(line) if m: __version__ = m.group(1) break def _requirements():", "\"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(), classifiers=[ \"Development Status :: 5 - Production/Stable\", \"License ::", "from setuptools.command.test import test as TestCommand __version__ = '' with open('facebookbot/__about__.py', 'r') as", "Messaging API SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ],", "\"License :: OSI Approved :: Apache Software License\", \"Intended Audience :: Developers\", \"Programming", "coding: utf-8 -*- # Licensed under the Apache License, Version 2.0 (the \"License\");", "<reponame>giggslam/python-messengerbot-sdk<filename>setup.py #!/usr/bin/env python # -*- coding: utf-8 -*- # Licensed under the Apache", "under the Apache License, Version 2.0 (the \"License\"); you may # not use", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See", "API SDK for Python\", long_description=long_description, license='Apache License 2.0', packages=[ \"facebookbot\", \"facebookbot.models\" ], install_requires=_requirements(),", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "Version 2.0 (the \"License\"); you may # not use this file except in", "under the License. import re import sys from setuptools import setup from setuptools.command.test" ]
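The version string above is pulled out of facebookbot/__about__.py with a small regex. A minimal, self-contained sketch of that pattern run against an assumed sample line (the version value is made up, not the real __about__.py contents):

# Minimal sketch of the __version__-extraction regex used in setup.py above.
import re

reg = re.compile(r'__version__ = [\'"]([^\'"]*)[\'"]')
m = reg.match("__version__ = '1.2.3'")
print(m.group(1))  # 1.2.3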
[ "affiliates. # Copyright (c) HuggingFace Inc. team. # # Licensed under the Apache", "it as a regular PyTorch Module and refer to the PyTorch documentation for", "Unless required by applicable law or agreed to in writing, software # distributed", "head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor`", "= self.classifier(pooled_output) loss = None if labels is not None: if self.num_labels ==", "<NAME>. It's a supervised multimodal bitransformer model that fuses information from text and", "all matter related to general usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration", "= input_ids.device if input_ids is not None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal,", "information from text and other image encoders, and obtain state-of-the-art performance on various", "its model (such as downloading or saving, resizing the input embeddings, pruning heads", "return_dict=None, ): r\"\"\" Returns: Examples:: # For example purposes. Not runnable. transformer =", "if not return_dict: output = (logits,) + outputs[2:] return ((loss,) + output) if", "tokens in the vocabulary. It does not expect [CLS] token to be added", "- 0 corresponds to a *sentence A* token, - 1 corresponds to a", "= (logits,) + outputs[2:] return ((loss,) + output) if loss is not None", "self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids,", "from these tokens will be summed with the respective token embeddings for the", "the weighted average in the self-attention heads. Examples: ```python # For example purposes.", "Segment token indices to indicate first and second portions of the inputs. Indices", "in the cross-attention if the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor`", "Examples: ```python # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder =", "control over how to convert `input_ids` indices into associated vectors than the model's", "are **not masked**, - 0 for tokens that are **masked**. [What are attention", "None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None else", "appended to the end of other modality embeddings. Indices can be obtained using", "self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings =", "= self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was", "of other modality embeddings. Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and", "matter related to general usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class", "*optional*): Whether or not to return the hidden states of all layers. See", "is configured as a decoder. Mask values selected in `[0, 1]`: - 1", "various elements depending on the configuration (config) and inputs: **loss**: (*optional*, returned when", "and its affiliates. # Copyright (c) HuggingFace Inc. team. 
# # Licensed under", "bitransformer model that fuses information from text and other image encoders, and obtain", "= None if labels is not None: if self.num_labels == 1: # We", "*optional*): Indices of positions of each input sequence tokens in the position embeddings", "nullify selected heads of the self-attention modules. Mask values selected in `[0, 1]`:", "def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT", "not load the weights associated with the model, only the configuration. transformer (:class:", "**loss**: (*optional*, returned when `labels` is provided) `torch.FloatTensor` of shape `(1,)`: Classification (or", "\"\"\"Generic Modal Embeddings which takes in an encoder, and a transformer embedding.\"\"\" def", "= encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:]", "`attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not", "an Image Encoder, the shape would be (batch_size, channels, height, width) input_ids (`torch.LongTensor`", "= token_embeddings.size(1) if start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "more control over how to convert `input_ids` indices into associated vectors than the", "regular PyTorch Module and refer to the PyTorch documentation for all matter related", "not to return a [`~file_utils.ModelOutput`] instead of a plain tuple. \"\"\" @add_start_docstrings( \"The", "Classification (or regression if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`)", "if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or regression if", "def __init__(self, config, encoder, embeddings): super().__init__() self.config = config self.encoder = encoder self.proj_embeddings", "= MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions is not None", "**masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment", "extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs", "not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1)", "of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of", "indicate first and second portions of the inputs. Indices are selected in `[0,", "of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of", "logits = self.classifier(pooled_output) loss = None if labels is not None: if self.num_labels", "masked**, - 0 for tokens that are **masked**. 
[What are attention masks?](../glossary#attention-mask) token_type_ids", "Hidden-states of the model at the output of each layer plus the initial", "the same time\") elif input_ids is not None: input_txt_shape = input_ids.size() elif inputs_embeds", "for all its model (such as downloading or saving, resizing the input embeddings,", "loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not", "add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from", "computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending on the configuration (config) and", "modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings(", "`output_hidden_states=True`) list of `torch.FloatTensor` (one for the output of each layer + the", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "PyTorch Module and refer to the PyTorch documentation for all matter related to", "selected in `[0, 1]`: - 0 corresponds to a *sentence A* token, -", "token to be added to Other Modality Embedding. [CLS] Most commonly used for", "input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, )", "to the PyTorch documentation for all matter related to general usage and behavior.", "forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if", "are **masked**. output_attentions (`bool`, *optional*): Whether or not to return the attentions tensors", "input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start token to be", "sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) +", "Optionally, instead of passing `input_ids` you can choose to directly pass an embedded", "be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of", "= embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self,", "self-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the head", "encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "to compute the weighted average in the self-attention heads. Examples: ```python # For", "pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. 
Use it", "sequence_length)`: Segment token indices to indicate first and second portions of the inputs.", "1) input_shape = embedding_output.size()[:-1] if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else:", "is computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending on the configuration (config)", "def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model with a sequence", "details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start", "self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings =", "config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1)", "first and second portions of the inputs. Indices are selected in `[0, 1]`:", "does not load the weights associated with the model, only the configuration. transformer", "tokens in the position embeddings for the non-text modality. Selected in the range", "position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device", "attentions tensors of all attention layers. See `attentions` under returned tensors for more", "doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss()", "None and inputs_embeds is not None: raise ValueError(\"You cannot specify both input_ids and", "== 1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1`", "for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. It's a supervised", "shape that the encoder for that type expects. e.g. With an Image Encoder,", "= inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify either input_ids or inputs_embeds\") device", "`(batch_size, num_heads, sequence_length, sequence_length)`: Attentions weights after the attention softmax, used to compute", "modal_sequence_length)`: Segment token indices to indicate different portions of the non-text modality. The", "detail. return_dict (`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of", "for the output of each layer + the output of the embeddings) of", "`hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not", "position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each input", "modality data. It will be the shape that the encoder for that type", "`torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1) scores (before SoftMax).", "*optional*): Whether or not to return the attentions tensors of all attention layers.", "= embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout =", "only the configuration. 
transformer (:class: *~nn.Module*): A text transformer that is used by", "seq_length) if token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device )", "hidden-states without any specific head on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin): def", "PyTorch documentation for all matter related to general usage and behavior. Parameters: config", "comprising various elements depending on the configuration (config) and inputs: **loss**: (*optional*, returned", "ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__() self.config = config self.transformer = transformer", "input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the", "= self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states,", "elements depending on the configuration (config) and inputs: **loss**: (*optional*, returned when `labels`", "the cross-attention if the model is configured as a decoder. Mask values selected", "are **not masked**, - 0 for tokens that are **masked**. output_attentions (`bool`, *optional*):", "position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length),", "obtain state-of-the-art performance on various multimodal classification benchmark tasks. This model inherits from", "@replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None,", "return_dict: output = (logits,) + outputs[2:] return ((loss,) + output) if loss is", "under the License. \"\"\"PyTorch MMBT model. \"\"\" import torch from torch import nn", "position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples::", "token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not None: start_token_embeds", "encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device)", "not use this file except in compliance with the License. # You may", "expects. e.g. 
With an Image Encoder, the shape would be (batch_size, channels, height,", "from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs", "is not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask,", "if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape,", "head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict = return_dict if return_dict is not None", "to Other Modality Embedding. [CLS] Most commonly used for classification tasks. modal_end_tokens (`torch.LongTensor`", "or inputs_embeds\") device = input_ids.device if input_ids is not None else inputs_embeds.device modal_embeddings", "Attentions weights after the attention softmax, used to compute the weighted average in", "the non-text modality. position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions", "modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1]", "that is used by MMBT. It should have embeddings, encoder, and pooler attributes.", "encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the", "Optional end token to be added to Other Modality Embedding. [SEP] Most commonly", "+= 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is not None: end_token_embeds", "**not masked**, - 0 for tokens that are **masked**. output_attentions (`bool`, *optional*): Whether", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "with a sequence classification/regression head on top (a linear layer on top of", "the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`,", "agreed to in writing, software # distributed under the License is distributed on", "token, - 1 corresponds to a *sentence B* token. [What are token type", "second modality. It should take in a batch of modal inputs and return", "encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device),", "See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of", "or saving, resizing the input embeddings, pruning heads etc.) This model is also", "is used by MMBT. It should have embeddings, encoder, and pooler attributes. encoder", "position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. [What are position", "with the respective token embeddings for the non-text modality. position_ids (`torch.LongTensor` of shape", "is **not masked**, - 0 indicates the head is **masked**. 
inputs_embeds (`torch.FloatTensor` of", "provided) `torch.FloatTensor` of shape `(1,)`: Classification (or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor`", "to a *sentence B* token. [What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor`", "not return_dict: output = (logits,) + outputs[2:] return ((loss,) + output) if loss", "model is configured as a decoder. Mask values selected in `[0, 1]`: -", "token_embeddings + position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings", "(`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary.", "(`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each input sequence", "token to be added as it's appended to the end of other modality", "embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at the output", "that are **not masked**, - 0 for tokens that are **masked**. [What are", "regression if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of", "# We are doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else:", "Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple. \"\"\"", "transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None,", "for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`: Attentions weights after the", "modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits =", "- 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss),", "as it's appended to the end of other modality embeddings. Indices can be", "for tokens that are **not masked**, - 0 for tokens that are **masked**.", "to in writing, software # distributed under the License is distributed on an", "inputs and return k, n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal", "`[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size,", "implied. # See the License for the specific language governing permissions and #", "`(1,)`: Classification (or regression if config.num_labels==1) loss. 
**logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)`", "not None and inputs_embeds is not None: raise ValueError(\"You cannot specify both input_ids", "else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape,", "labels=None, return_dict=None, ): return_dict = return_dict if return_dict is not None else self.config.use_return_dict", "a config file does not load the weights associated with the model, only", ") def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\"", "of shape `(batch_size, sequence_length)`: Mask to avoid performing attention on padding token indices.", "input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output", "token to be added to Other Modality Embedding. [SEP] Most commonly used. attention_mask", "HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the", "(`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads", "fuses information from text and other image encoders, and obtain state-of-the-art performance on", "that are **not masked**, - 0 for tokens that are **masked**. output_attentions (`bool`,", "return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model with", "See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or", "coding=utf-8 # Copyright (c) Facebook, Inc. and its affiliates. # Copyright (c) HuggingFace", "encoder input. This mask is used in the cross-attention if the model is", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward(", "= model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2] ```\"\"\" def __init__(self, config, transformer,", "takes in an encoder, and a transformer embedding.\"\"\" def __init__(self, config, encoder, embeddings):", "Initializing with a config file does not load the weights associated with the", "encoder. Used in the cross-attention if the model is configured as a decoder.", "[`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape", "added to Other Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of", "if the model is configured as a decoder. 
Mask values selected in `[0,", "input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output,", "= self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states,", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "an encoder, and a transformer embedding.\"\"\" def __init__(self, config, encoder, embeddings): super().__init__() self.config", "embeddings. Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details.", "= outputs[:2] ```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt", "embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in [Supervised Multimodal Bitransformers for", "encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if", "= BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions =", "r\"\"\" MMBT model was proposed in [Supervised Multimodal Bitransformers for Classifying Images and", "It does not expect [CLS] token to be added as it's appended to", "are doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct =", "of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you can", "output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if", "`torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`: Attentions weights", "not None: raise ValueError(\"You cannot specify both input_ids and inputs_embeds at the same", "labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict:", "__init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder)", "`(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each input sequence tokens in the", "methods the library implements for all its model (such as downloading or saving,", "the input embeddings, pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module)", "example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config,", "position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to", "(`torch.FloatTensor` of shape `(batch_size, ***)`): The other modality data. It will be the", "instead of passing `input_ids` you can choose to directly pass an embedded representation.", "transformer that is used by MMBT. 
It should have embeddings, encoder, and pooler", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "vocabulary. It does not expect [CLS] token to be added as it's appended", "the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of", "0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`,", "portions of the non-text modality. The embeddings from these tokens will be summed", "encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings =", "at the output of each layer plus the initial embedding outputs. **attentions**: (*optional*,", "resizing the input embeddings, pruning heads etc.) This model is also a PyTorch", "<NAME>, <NAME>, <NAME>, <NAME>. It's a supervised multimodal bitransformer model that fuses information", "seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids", "config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def", "None if labels is not None: if self.num_labels == 1: # We are", "after the attention softmax, used to compute the weighted average in the self-attention", "ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions is", "self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask,", "`config.num_labels > 1` a classification loss is computed (Cross-Entropy). Returns: *Tuple* comprising various", "the library implements for all its model (such as downloading or saving, resizing", "start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token", "return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in [Supervised Multimodal Bitransformers", "downloading or saving, resizing the input embeddings, pruning heads etc.) This model is", "input_ids is not None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids,", "is not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)],", "a decoder. Mask values selected in `[0, 1]`: - 1 for tokens that", "indices to indicate different portions of the non-text modality. The embeddings from these", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "all the parameters of the model. Initializing with a config file does not", "Use it as a regular PyTorch Module and refer to the PyTorch documentation", "Encoder for the second modality. 
It should take in a batch of modal", "attributes. encoder (:class: *~nn.Module*): Encoder for the second modality. It should take in", "not to return the attentions tensors of all attention layers. See `attentions` under", "else self.config.use_return_dict if input_ids is not None and inputs_embeds is not None: raise", "*optional*): Indices of positions of each input sequence tokens in the position embeddings.", "runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\"", "directly pass an embedded representation. This is useful if you want more control", "related to general usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with", "config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`,", "that fuses information from text and other image encoders, and obtain state-of-the-art performance", "for tokens that are **masked**. output_attentions (`bool`, *optional*): Whether or not to return", "of each input sequence tokens in the position embeddings for the non-text modality.", "input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1]", "the head is **not masked**, - 0 indicates the head is **masked**. inputs_embeds", "tokens that are **not masked**, - 0 for tokens that are **masked**. [What", "CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput", "the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor`", "return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, )", "(logits,) + outputs[2:] return ((loss,) + output) if loss is not None else", "pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings =", "Mask values selected in `[0, 1]`: - 1 for tokens that are **not", "torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids", "attention on padding token indices. Mask values selected in `[0, 1]`: - 1", "value @add_start_docstrings( \"\"\" MMBT Model with a sequence classification/regression head on top (a", "list of `torch.FloatTensor` (one for the output of each layer + the output", "sequence tokens in the position embeddings. 
Selected in the range `[0, config.max_position_embeddings -", "MMBT model was proposed in [Supervised Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt)", "= r\"\"\" MMBT model was proposed in [Supervised Multimodal Bitransformers for Classifying Images", "embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly pass", "selected in `[0, 1]`: - 1 indicates the head is **not masked**, -", "if end_token is not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings =", "input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings", "pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if", "at the output of the last layer of the encoder. Used in the", "for the generic methods the library implements for all its model (such as", "encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob)", "loss = None if labels is not None: if self.num_labels == 1: #", "torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if", "attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is None:", "embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model", "the output of each layer + the output of the embeddings) of shape", "Selected in the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) head_mask", "last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings", "shape `(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*,", "have embeddings, encoder, and pooler attributes. encoder (:class: *~nn.Module*): Encoder for the second", "in the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor`", "import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling,", "`(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules. Mask", "token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask", "values selected in `[0, 1]`: - 1 indicates the head is **not masked**,", "in the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor`", "token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT", "head is **masked**. 
inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead", "- 1 corresponds to a *sentence B* token. [What are token type IDs?](../glossary#token-type-ids)", "encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not", "of shape `(1,)`: Classification (or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape", "modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns:", "r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The other modality data. It", "embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in", "a *sentence B* token. [What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of", "cannot specify both input_ids and inputs_embeds at the same time\") elif input_ids is", "Indices are selected in `[0, 1]`: - 0 corresponds to a *sentence A*", "by MMBT. It should have embeddings, encoder, and pooler attributes. encoder (:class: *~nn.Module*):", "you want more control over how to convert `input_ids` indices into associated vectors", "transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions", "weighted average in the self-attention heads. Examples: ```python # For example purposes. Not", "import ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class", "(`bool`, *optional*): Whether or not to return the attentions tensors of all attention", "end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not", "each layer plus the initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list", "`(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on the padding token indices", "nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None,", "pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return", "We are doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct", "data. It will be the shape that the encoder for that type expects.", "attention layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*):", "scores (before SoftMax). 
**hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for", "Encoder, the shape would be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of shape", "embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output", "See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or", "Other Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size,", "= return_dict if return_dict is not None else self.config.use_return_dict if input_ids is not", "if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states", "Model configuration class with all the parameters of the model. Initializing with a", "if return_dict is not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens,", "transformer, encoder): super().__init__() self.config = config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder,", "(*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for computing the sequence classification/regression loss. Indices", "not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions,", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "not None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify either input_ids", "using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens", "is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed", "end token to be added to Other Modality Embedding. [SEP] Most commonly used.", "else: raise ValueError(\"You have to specify either input_ids or inputs_embeds\") device = input_ids.device", "text and other image encoders, and obtain state-of-the-art performance on various multimodal classification", "from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils import logging", "in `[0, 1]`: - 0 corresponds to a *sentence A* token, - 1", "was proposed in [Supervised Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>,", "the encoder for that type expects. e.g. With an Image Encoder, the shape", "loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). Returns: *Tuple*", "either input_ids or inputs_embeds\") device = input_ids.device if input_ids is not None else", "runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs", "classification benchmark tasks. This model inherits from [`PreTrainedModel`]. 
Check the superclass documentation for", "is not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings],", "input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ):", "import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings", "encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask =", "regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss", "See the License for the specific language governing permissions and # limitations under", "the License. \"\"\"PyTorch MMBT model. \"\"\" import torch from torch import nn from", "token indices to indicate different portions of the non-text modality. The embeddings from", "the last layer of the encoder. Used in the cross-attention if the model", "== 1: # We are doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1),", "((loss,) + output) if loss is not None else output return SequenceClassifierOutput( loss=loss,", "`config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels >", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "\"\"\"PyTorch MMBT model. \"\"\" import torch from torch import nn from torch.nn import", "modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict = return_dict if return_dict is not", "cross-attention if the model is configured as a decoder. Mask values selected in", "Classification (or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification", "for the non-text modality. Selected in the range `[0, config.max_position_embeddings - 1]`. [What", "self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None,", "token_embeddings], dim=1) if end_token is not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1", "(`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output", "usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with all the parameters", "0 for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor`", "embedding_output.size()[:-1] if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask = torch.cat(", "pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of", "the PyTorch documentation for all matter related to general usage and behavior. 
Parameters:", ") class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__() self.config = config", "= r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The other modality data.", "modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if", "`output_attentions=True`) list of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length,", "Image Encoder, the shape would be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of", "Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`:", "of input sequence tokens in the vocabulary. It does not expect [CLS] token", "of positions of each input sequence tokens in the position embeddings. Selected in", "the output of the embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the", "= transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self,", "be added to Other Modality Embedding. [CLS] Most commonly used for classification tasks.", "`(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention", "governing permissions and # limitations under the License. \"\"\"PyTorch MMBT model. \"\"\" import", "(c) HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "of each layer plus the initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`)", "Embeddings which takes in an encoder, and a transformer embedding.\"\"\" def __init__(self, config,", "= self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output,", "seq_length = token_embeddings.size(1) if start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length +=", "self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict,", "masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token indices to indicate", "corresponds to a *sentence B* token. [What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*)", "is not None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify either", "of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at the output of", "heads of the self-attention modules. Mask values selected in `[0, 1]`: - 1", "a batch of modal inputs and return k, n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING", "heads etc.) 
This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as", "None: raise ValueError(\"You cannot specify both input_ids and inputs_embeds at the same time\")", "def __init__(self, config, transformer, encoder): super().__init__() self.config = config self.transformer = transformer self.modal_encoder", "of the non-text modality. The embeddings from these tokens will be summed with", "the respective token embeddings for the non-text modality. position_ids (`torch.LongTensor` of shape `(batch_size,", "shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the", "is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids,", "token_embeddings.size(1) if start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings", "transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal,", "device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device)", "super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier", "Whether or not to return the attentions tensors of all attention layers. See", "SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC", "self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is not", "= CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,)", "other modality data. It will be the shape that the encoder for that", "(sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self):", "mask is used in the cross-attention if the model is configured as a", "that the encoder for that type expects. e.g. With an Image Encoder, the", "SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for the output", "encoder, and pooler attributes. encoder (:class: *~nn.Module*): Encoder for the second modality. It", "to be added as it's appended to the end of other modality embeddings.", "KIND, either express or implied. # See the License for the specific language", "transformer, encoder) outputs = model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2] ```\"\"\" def", "shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each input sequence tokens in", "- 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length,", "the non-text modality. Selected in the range `[0, config.max_position_embeddings - 1]`. 
[What are", "= config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings", "encoder) \"\"\" output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states", "output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape", "- 1 indicates the head is **not masked**, - 0 indicates the head", "dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask,", "the self-attention heads. Examples: ```python # For example purposes. Not runnable. transformer =", "be added as it's appended to the end of other modality embeddings. Indices", "This mask is used in the cross-attention if the model is configured as", "+ encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings", "`(batch_size, ***)`): The other modality data. It will be the shape that the", "None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else", "(or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or", "shape `(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. It does", "outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if labels is", "import torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils", "= self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids", "ANY KIND, either express or implied. # See the License for the specific", "modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output)", "sequence tokens in the position embeddings for the non-text modality. Selected in the", "of the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*)", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a", "modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional end token to be added to", "not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids,", "to return a [`~file_utils.ModelOutput`] instead of a plain tuple. 
\"\"\" @add_start_docstrings( \"The bare", "encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output", "modal_sequence_length)`, *optional*): Indices of positions of each input sequence tokens in the position", "and inputs_embeds at the same time\") elif input_ids is not None: input_txt_shape =", "import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin", "= ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions", "\"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The other", "a regular PyTorch Module and refer to the PyTorch documentation for all matter", "BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids,", "configured as a decoder. Mask values selected in `[0, 1]`: - 1 for", "model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):", "= MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2]", "tensors for more detail. return_dict (`bool`, *optional*): Whether or not to return a", "torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask is None: attention_mask = torch.ones(input_shape,", "`(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last", "config, encoder, embeddings): super().__init__() self.config = config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size,", "not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not None", "to a *sentence A* token, - 1 corresponds to a *sentence B* token.", "embeddings for the non-text modality. Selected in the range `[0, config.max_position_embeddings - 1]`.", "for that type expects. e.g. With an Image Encoder, the shape would be", "raw hidden-states without any specific head on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin):", "= outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if labels", "`[0, 1]`: - 0 corresponds to a *sentence A* token, - 1 corresponds", "of the inputs. Indices are selected in `[0, 1]`: - 0 corresponds to", "(such as downloading or saving, resizing the input embeddings, pruning heads etc.) This", "Copyright (c) HuggingFace Inc. team. 
# # Licensed under the Apache License, Version", "embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob)", "loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))", ") if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat(", "model's internal embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*):", "= torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 )", "sequence_length)`, *optional*): Mask to avoid performing attention on the padding token indices of", "class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for computing the", "internal embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence", "None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if", "CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) +", "output of the last layer of the encoder. Used in the cross-attention if", "outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask,", "# limitations under the License. \"\"\"PyTorch MMBT model. \"\"\" import torch from torch", "`(batch_size,)`, *optional*): Optional end token to be added to Other Modality Embedding. [SEP]", "self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None else self.config.use_return_dict if", "if labels is not None: if self.num_labels == 1: # We are doing", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output", "the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally,", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "sequence_length, hidden_size)`: Hidden-states of the model at the output of each layer plus", "the model. Initializing with a config file does not load the weights associated", "ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None,", "embeddings. 
\"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The", "input_ids is not None and inputs_embeds is not None: raise ValueError(\"You cannot specify", "attention on the padding token indices of the encoder input. This mask is", "applicable law or agreed to in writing, software # distributed under the License", "self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask,", "self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None,", "pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling(", "embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor` (one for each", "to be added to Other Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*)", "loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[2:] return ((loss,)", "is not None: input_txt_shape = input_ids.size() elif inputs_embeds is not None: input_txt_shape =", "torch import nn from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward,", "None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if", "BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value):", "Modal Embeddings which takes in an encoder, and a transformer embedding.\"\"\" def __init__(self,", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "can choose to directly pass an embedded representation. This is useful if you", "obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids)", "(input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings =", "config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm", "It should take in a batch of modal inputs and return k, n", "1]`. 
[What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices", "= output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states", "is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if output_hidden_states is not", "writing, software # distributed under the License is distributed on an \"AS IS\"", "n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size,", "outputting raw hidden-states without any specific head on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module,", "loss is not None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions, )", "device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask,", "will be summed with the respective token embeddings for the non-text modality. position_ids", "Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1`", "Facebook, Inc. and its affiliates. # Copyright (c) HuggingFace Inc. team. # #", "None: position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is", "= self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings", "\"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`:", "compliance with the License. # You may obtain a copy of the License", "tuple. \"\"\" @add_start_docstrings( \"The bare MMBT Model outputting raw hidden-states without any specific", "or not to return the attentions tensors of all attention layers. See `attentions`", ") position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings +", "than the model's internal embedding lookup matrix. 
encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length,", "return_dict is not None else self.config.use_return_dict if input_ids is not None and inputs_embeds", "is not None and inputs_embeds is not None: raise ValueError(\"You cannot specify both", "the attention softmax, used to compute the weighted average in the self-attention heads.", "Model with a sequence classification/regression head on top (a linear layer on top", "self.classifier(pooled_output) loss = None if labels is not None: if self.num_labels == 1:", "return_dict = return_dict if return_dict is not None else self.config.use_return_dict if input_ids is", "would be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices", "sequence_length)`: Attentions weights after the attention softmax, used to compute the weighted average", "token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\"", "hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value", "Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. It's a", "This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular", "modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start token to be added to", "input_ids is not None: input_txt_shape = input_ids.size() elif inputs_embeds is not None: input_txt_shape", "Modality Embedding. [CLS] Most commonly used for classification tasks. modal_end_tokens (`torch.LongTensor` of shape", "modality embeddings. Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for", "regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss", "config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings =", "*optional*): Optionally, instead of passing `input_ids` you can choose to directly pass an", "1]`: - 0 corresponds to a *sentence A* token, - 1 corresponds to", "token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token indices to indicate first", "tasks. modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional end token to be added", "encoder, embeddings): super().__init__() self.config = config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size)", "computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels -", "will be the shape that the encoder for that type expects. e.g. With", "(the \"License\"); # you may not use this file except in compliance with", "load the weights associated with the model, only the configuration. transformer (:class: *~nn.Module*):", "`[0, 1]`: - 1 indicates the head is **not masked**, - 0 indicates", "to be added to Other Modality Embedding. 
[CLS] Most commonly used for classification", "shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you can choose", "# Unless required by applicable law or agreed to in writing, software #", "are selected in `[0, 1]`: - 0 corresponds to a *sentence A* token,", "different portions of the non-text modality. The embeddings from these tokens will be", "output) if loss is not None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states,", "by applicable law or agreed to in writing, software # distributed under the", "inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1]", "in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss", "the cross-attention if the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of", "input sequence tokens in the position embeddings for the non-text modality. Selected in", "seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is not None:", "layer on top of the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module):", "token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is not None: end_token_embeds = self.word_embeddings(end_token)", "file except in compliance with the License. # You may obtain a copy", "token indices. Mask values selected in `[0, 1]`: - 1 for tokens that", "bare MMBT Model outputting raw hidden-states without any specific head on top.\", MMBT_START_DOCSTRING,", "= loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[2:] return", "[What are attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token", "for the specific language governing permissions and # limitations under the License. \"\"\"PyTorch", "head on top (a linear layer on top of the pooled output) \"\"\",", "performing attention on the padding token indices of the encoder input. This mask", "loss is computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending on the configuration", "loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels ==", "and # limitations under the License. \"\"\"PyTorch MMBT model. \"\"\" import torch from", "modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None,", "Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder)", "r\"\"\" Returns: Examples:: # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder", "Mask to avoid performing attention on padding token indices. Mask values selected in", "# For example purposes. Not runnable. 
transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt", "return_dict = return_dict if return_dict is not None else self.config.use_return_dict outputs = self.mmbt(", "purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer,", "Mask values selected in `[0, 1]`: - 1 indicates the head is **not", "input sequence tokens in the vocabulary. It does not expect [CLS] token to", "self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model with a", "a supervised multimodal bitransformer model that fuses information from text and other image", "self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in [Supervised Multimodal", "over how to convert `input_ids` indices into associated vectors than the model's internal", "loss, logits = outputs[:2] ```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels =", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "output_hidden_states = ( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict", "output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return", "expect [CLS] token to be added as it's appended to the end of", "(*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment token indices to indicate different portions", "Examples:: # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args)", "as a regular PyTorch Module and refer to the PyTorch documentation for all", "is used in the cross-attention if the model is configured as a decoder.", "specify both input_ids and inputs_embeds at the same time\") elif input_ids is not", "+= 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids =", "`(batch_size,)`: Labels for computing the sequence classification/regression loss. Indices should be in `[0,", "avoid performing attention on the padding token indices of the encoder input. This", "= ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids, labels=labels) loss,", "super().__init__() self.config = config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings =", "the model, only the configuration. transformer (:class: *~nn.Module*): A text transformer that is", "labels.view(-1)) if not return_dict: output = (logits,) + outputs[2:] return ((loss,) + output)", "output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is", "cross-attention if the model is configured as a decoder. 
encoder_attention_mask (`torch.FloatTensor` of shape", "if input_ids is not None and inputs_embeds is not None: raise ValueError(\"You cannot", "replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils import", "input sequence tokens in the position embeddings. Selected in the range `[0, config.max_position_embeddings", "torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask =", "= embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None,", "added as it's appended to the end of other modality embeddings. Indices can", "lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states", "tokens will be summed with the respective token embeddings for the non-text modality.", "encoder, and a transformer embedding.\"\"\" def __init__(self, config, encoder, embeddings): super().__init__() self.config =", "embeddings): super().__init__() self.config = config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings", "each input sequence tokens in the position embeddings. Selected in the range `[0,", "return_dict is not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens,", "is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of", "+ output) if loss is not None else output return SequenceClassifierOutput( loss=loss, logits=logits,", "raise ValueError(\"You cannot specify both input_ids and inputs_embeds at the same time\") elif", "output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: # For example purposes. Not runnable. transformer", "MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions is not None else", "its affiliates. # Copyright (c) HuggingFace Inc. team. 
# # Licensed under the", "`torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token indices to indicate first and second", "torch.nn import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import", "r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for computing the sequence classification/regression", "MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1, self.num_labels),", "= config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size,", "1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is not None: end_token_embeds =", "are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment token", "a *sentence A* token, - 1 corresponds to a *sentence B* token. [What", "(`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on the", "<NAME>, <NAME>, <NAME>. It's a supervised multimodal bitransformer model that fuses information from", "positions of each input sequence tokens in the position embeddings. Selected in the", "type expects. e.g. With an Image Encoder, the shape would be (batch_size, channels,", "= return_dict if return_dict is not None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal,", "file does not load the weights associated with the model, only the configuration.", "indices to indicate first and second portions of the inputs. Indices are selected", "tokens that are **not masked**, - 0 for tokens that are **masked**. output_attentions", "with the model, only the configuration. transformer (:class: *~nn.Module*): A text transformer that", "None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask],", "take in a batch of modal inputs and return k, n dimension embeddings.", "and inputs: **loss**: (*optional*, returned when `labels` is provided) `torch.FloatTensor` of shape `(1,)`:", "non-text modality. The embeddings from these tokens will be summed with the respective", "associated vectors than the model's internal embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape", "outputs = model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2] ```\"\"\" def __init__(self, config,", "embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None,", "MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in [Supervised Multimodal Bitransformers for Classifying", "hidden_size)`: Hidden-states of the model at the output of each layer plus the", "token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output =", "the embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at the", "of the encoder. 
Used in the cross-attention if the model is configured as", "MMBT Model outputting raw hidden-states without any specific head on top.\", MMBT_START_DOCSTRING, )", "= torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask", "which takes in an encoder, and a transformer embedding.\"\"\" def __init__(self, config, encoder,", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. It's a supervised multimodal bitransformer", "return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output,", "to general usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with all", "*~nn.Module*): A text transformer that is used by MMBT. It should have embeddings,", "from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements", "mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions if output_attentions is not", "from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic", "1` a regression loss is computed (Mean-Square loss), If `config.num_labels > 1` a", "self-attention heads. Examples: ```python # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased')", "same time\") elif input_ids is not None: input_txt_shape = input_ids.size() elif inputs_embeds is", "by <NAME>, <NAME>, <NAME>, <NAME>. It's a supervised multimodal bitransformer model that fuses", "return_dict if return_dict is not None else self.config.use_return_dict if input_ids is not None", "input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None:", "of shape `(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1) scores (before SoftMax). **hidden_states**:", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the", "computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy).", "1` a classification loss is computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending", "self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling, config_class=_CONFIG_FOR_DOC) def forward( self, input_modal, input_ids=None,", "model (such as downloading or saving, resizing the input embeddings, pruning heads etc.)", "embedding.\"\"\" def __init__(self, config, encoder, embeddings): super().__init__() self.config = config self.encoder = encoder", "device = input_ids.device if input_ids is not None else inputs_embeds.device modal_embeddings = self.modal_encoder(", "pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if labels is not", "embeddings, encoder, and pooler attributes. 
encoder (:class: *~nn.Module*): Encoder for the second modality.", "is provided) `torch.FloatTensor` of shape `(1,)`: Classification (or regression if config.num_labels==1) loss. **logits**:", "the output of each layer plus the initial embedding outputs. **attentions**: (*optional*, returned", "start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids", "to avoid performing attention on the padding token indices of the encoder input.", "self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None):", "when `output_hidden_states=True`) list of `torch.FloatTensor` (one for the output of each layer +", "of hidden-states at the output of the last layer of the encoder. Used", "supervised multimodal bitransformer model that fuses information from text and other image encoders,", "decoder. Mask values selected in `[0, 1]`: - 1 for tokens that are", "self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings", "a [`~file_utils.ModelOutput`] instead of a plain tuple. \"\"\" @add_start_docstrings( \"The bare MMBT Model", "model was proposed in [Supervised Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by", "Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What", "import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils import logging logger =", "self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout", "be added to Other Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor`", "return the attentions tensors of all attention layers. See `attentions` under returned tensors", "shape `(batch_size, ***)`): The other modality data. It will be the shape that", "the License for the specific language governing permissions and # limitations under the", "start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is", "(Cross-Entropy). Returns: *Tuple* comprising various elements depending on the configuration (config) and inputs:", "configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to", "Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. It's", "(c) Facebook, Inc. and its affiliates. # Copyright (c) HuggingFace Inc. team. #", "with all the parameters of the model. 
Initializing with a config file does", "when `output_attentions=True`) list of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads,", "input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None,", "= self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape", "token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment token indices", "initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor` (one for", "loss is computed (Mean-Square loss), If `config.num_labels > 1` a classification loss is", "(Mean-Square loss), If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). Returns:", "1: # We are doing regression loss_fct = MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1))", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`: Segment token indices to", "if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict", "logits = outputs[:2] ```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels", "**not masked**, - 0 indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape", "not None: if self.num_labels == 1: # We are doing regression loss_fct =", "input_ids and inputs_embeds at the same time\") elif input_ids is not None: input_txt_shape", "shape `(batch_size,)`, *optional*): Optional end token to be added to Other Modality Embedding.", "limitations under the License. \"\"\"PyTorch MMBT model. \"\"\" import torch from torch import", "without any specific head on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self,", "in the self-attention heads. Examples: ```python # For example purposes. Not runnable. transformer", "*optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain tuple.", "and second portions of the inputs. Indices are selected in `[0, 1]`: -", "position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if", "on top of the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\"", "[What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start token", "This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods", "device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings],", "the position embeddings for the non-text modality. 
Selected in the range `[0, config.max_position_embeddings", "you can choose to directly pass an embedded representation. This is useful if", "A* token, - 1 corresponds to a *sentence B* token. [What are token", "\"The bare MMBT Model outputting raw hidden-states without any specific head on top.\",", "shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each input sequence tokens in", "a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing", "Version 2.0 (the \"License\"); # you may not use this file except in", "ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module):", "For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt =", "modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict", "@add_start_docstrings( \"The bare MMBT Model outputting raw hidden-states without any specific head on", "selected in `[0, 1]`: - 1 for tokens that are **not masked**, -", "self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None:", "positions of each input sequence tokens in the position embeddings for the non-text", "IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify", "sequence_length, sequence_length)`: Attentions weights after the attention softmax, used to compute the weighted", "= embedding_output.size()[:-1] if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask =", "output_attentions = output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = (", "from ...modeling_utils import ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC =", "[CLS] Most commonly used for classification tasks. modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*):", "image encoders, and obtain state-of-the-art performance on various multimodal classification benchmark tasks. This", "dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings +", "(:class: *~nn.Module*): Encoder for the second modality. It should take in a batch", "all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*):", "BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer, encoder) \"\"\" output_attentions = output_attentions", "class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__() self.config = config self.transformer", "general usage and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with all the", "useful if you want more control over how to convert `input_ids` indices into", "on the padding token indices of the encoder input. 
This mask is used", "None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape", "- 1]`. [What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers,", "are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask", "from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils", "self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output,", "= nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length", "sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you can choose to directly", "in the position embeddings for the non-text modality. Selected in the range `[0,", "plus the initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor`", "self.config.use_return_dict if input_ids is not None and inputs_embeds is not None: raise ValueError(\"You", "labels=labels) loss, logits = outputs[:2] ```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels", "softmax, used to compute the weighted average in the self-attention heads. Examples: ```python", "[torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape,", "not expect [CLS] token to be added as it's appended to the end", "used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask to avoid performing attention", "for more detail. return_dict (`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`]", "This is useful if you want more control over how to convert `input_ids`", "*sentence A* token, - 1 corresponds to a *sentence B* token. [What are", "on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config, transformer, encoder): super().__init__()", "= self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\" MMBT model was proposed in [Supervised", "in a batch of modal inputs and return k, n dimension embeddings. \"\"\"", "should take in a batch of modal inputs and return k, n dimension", "inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: # For example", "weights after the attention softmax, used to compute the weighted average in the", "attention softmax, used to compute the weighted average in the self-attention heads. 
Examples:", "any specific head on top.\", MMBT_START_DOCSTRING, ) class MMBTModel(nn.Module, ModuleUtilsMixin): def __init__(self, config,", "and inputs_embeds is not None: raise ValueError(\"You cannot specify both input_ids and inputs_embeds", "can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are", "return a [`~file_utils.ModelOutput`] instead of a plain tuple. \"\"\" @add_start_docstrings( \"The bare MMBT", "how to convert `input_ids` indices into associated vectors than the model's internal embedding", "layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`, *optional*): Whether", "Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask to avoid", "(`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you", "and a transformer embedding.\"\"\" def __init__(self, config, encoder, embeddings): super().__init__() self.config = config", "self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None,", "(`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of a plain", ") embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask is None:", "head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: # For", "A text transformer that is used by MMBT. It should have embeddings, encoder,", "embedded representation. This is useful if you want more control over how to", "as a decoder. Mask values selected in `[0, 1]`: - 1 for tokens", "under returned tensors for more detail. return_dict (`bool`, *optional*): Whether or not to", "super().__init__() self.config = config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING)", "Returns: Examples:: # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder =", "sequence_length)`): Indices of input sequence tokens in the vocabulary. It does not expect", "torch from torch import nn from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils import", "torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids =", "Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. It's a supervised multimodal bitransformer model that", "**logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)` Classification (or regression if config.num_labels==1) scores (before", "config file does not load the weights associated with the model, only the", "<gh_stars>1-10 # coding=utf-8 # Copyright (c) Facebook, Inc. and its affiliates. # Copyright", "[What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*):", "and obtain state-of-the-art performance on various multimodal classification benchmark tasks. 
This model inherits", "start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings = torch.cat([start_token_embeds.unsqueeze(1),", "these tokens will be summed with the respective token embeddings for the non-text", "If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If `config.num_labels", "else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is", "outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor` (one for each layer)", "of positions of each input sequence tokens in the position embeddings for the", "Segment token indices to indicate different portions of the non-text modality. The embeddings", "OF ANY KIND, either express or implied. # See the License for the", "be the shape that the encoder for that type expects. e.g. With an", "sequence tokens in the vocabulary. It does not expect [CLS] token to be", "torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output =", "else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids,", "modal inputs and return k, n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args:", "to directly pass an embedded representation. This is useful if you want more", "masked**, - 0 for tokens that are **masked**. output_attentions (`bool`, *optional*): Whether or", "ValueError(\"You cannot specify both input_ids and inputs_embeds at the same time\") elif input_ids", "be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`] for details. [What are input", "transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal,", "when `labels` is provided) `torch.FloatTensor` of shape `(1,)`: Classification (or regression if config.num_labels==1)", "the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of", "output_attentions if output_attentions is not None else self.config.output_attentions output_hidden_states = ( output_hidden_states if", "the self-attention modules. Mask values selected in `[0, 1]`: - 1 indicates the", "embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of", "top of the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**:", "[What are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of", "portions of the inputs. Indices are selected in `[0, 1]`: - 0 corresponds", "`(batch_size, sequence_length)`): Indices of input sequence tokens in the vocabulary. It does not", "1]`. 
[What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`,", "add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils", "None: if self.num_labels == 1: # We are doing regression loss_fct = MSELoss()", "`torch.LongTensor` of shape `(batch_size,)`: Labels for computing the sequence classification/regression loss. Indices should", "model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids, labels=labels) loss, logits =", "layers. See `attentions` under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether", "(config) and inputs: **loss**: (*optional*, returned when `labels` is provided) `torch.FloatTensor` of shape", "MMBT model. \"\"\" import torch from torch import nn from torch.nn import CrossEntropyLoss,", "in `[0, 1]`: - 1 indicates the head is **not masked**, - 0", "is not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not", "labels is not None: if self.num_labels == 1: # We are doing regression", "head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits = self.classifier(pooled_output)", "if start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length += 1 token_embeddings =", "the non-text modality. The embeddings from these tokens will be summed with the", "\"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which takes in an encoder, and a", "(`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional end token to be added to Other", "transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout =", "[torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module and refer to the", "self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not None: start_token_embeds = self.word_embeddings(start_token) seq_length", "([`MMBTConfig`]): Model configuration class with all the parameters of the model. Initializing with", "if not return_dict: return (sequence_output, pooled_output) + encoder_outputs[1:] return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states,", "embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings =", "purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) mmbt = MMBTModel(config, transformer,", "under returned tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to", "if you want more control over how to convert `input_ids` indices into associated", "encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: # For example purposes. 
Not", "= modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings =", "MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels", "attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict = return_dict", "is not None else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids,", "non-text modality. position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of", "is not None: raise ValueError(\"You cannot specify both input_ids and inputs_embeds at the", "self.num_labels), labels.view(-1)) if not return_dict: output = (logits,) + outputs[2:] return ((loss,) +", "attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ):", "and pooler attributes. encoder (:class: *~nn.Module*): Encoder for the second modality. It should", "or agreed to in writing, software # distributed under the License is distributed", "**labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for computing the sequence classification/regression loss.", "start token to be added to Other Modality Embedding. [CLS] Most commonly used", "hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer of", "of passing `input_ids` you can choose to directly pass an embedded representation. This", "tokens that are **masked**. output_attentions (`bool`, *optional*): Whether or not to return the", "dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings,", "BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__)", "embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm = embeddings.LayerNorm self.dropout = nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal,", "*sentence B* token. [What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape", "shape would be (batch_size, channels, height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`):", "# For example purposes. Not runnable. 
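# `ImageEncoder` is a stand-in for a user-supplied module (it is not part of this library);
# it must return features of shape (batch_size, n, config.modal_hidden_size) so that
# ModalEmbeddings can project them into the transformer's hidden size.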
transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model", "attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask to avoid performing attention on", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "else inputs_embeds.device modal_embeddings = self.modal_encoder( input_modal, start_token=modal_start_tokens, end_token=modal_end_tokens, position_ids=modal_position_ids, token_type_ids=modal_token_type_ids, ) input_modal_shape =", "multimodal bitransformer model that fuses information from text and other image encoders, and", ") extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers)", "token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings)", "the initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor` (one", "License. # You may obtain a copy of the License at # #", "None else self.config.use_return_dict outputs = self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids,", "of each input sequence tokens in the position embeddings. Selected in the range", "encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None,", "to specify either input_ids or inputs_embeds\") device = input_ids.device if input_ids is not", "self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict,", "the attentions tensors of all attention layers. See `attentions` under returned tensors for", "( output_hidden_states if output_hidden_states is not None else self.config.output_hidden_states ) return_dict = return_dict", "input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None,", "Inc. and its affiliates. # Copyright (c) HuggingFace Inc. team. # # Licensed", "input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify either input_ids or inputs_embeds\")", "(*optional*, returned when `labels` is provided) `torch.FloatTensor` of shape `(1,)`: Classification (or regression", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use it as a regular PyTorch Module", "Mask to avoid performing attention on the padding token indices of the encoder", "non-text modality. 
Selected in the range `[0, config.max_position_embeddings - 1]`. [What are position", "shape `(batch_size,)`, *optional*): Optional start token to be added to Other Modality Embedding.", "token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids, token_type_ids=token_type_ids, inputs_embeds=inputs_embeds )", "outputs[:2] ```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt =", "Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The other modality data. It will", "passing `input_ids` you can choose to directly pass an embedded representation. This is", "is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long),", "attention_mask], dim=1 ) if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask", "self.config = config self.encoder = encoder self.proj_embeddings = nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings", "transformer (:class: *~nn.Module*): A text transformer that is used by MMBT. It should", "of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each input sequence tokens", "used in the cross-attention if the model is configured as a decoder. Mask", "and return k, n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor`", "nn.Linear(config.modal_hidden_size, config.hidden_size) self.position_embeddings = embeddings.position_embeddings self.token_type_embeddings = embeddings.token_type_embeddings self.word_embeddings = embeddings.word_embeddings self.LayerNorm =", "inputs_embeds\") device = input_ids.device if input_ids is not None else inputs_embeds.device modal_embeddings =", "License, Version 2.0 (the \"License\"); # you may not use this file except", "states of all layers. See `hidden_states` under returned tensors for more detail. return_dict", "the padding token indices of the encoder input. This mask is used in", "set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model with a sequence classification/regression", "is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask],", "It should have embeddings, encoder, and pooler attributes. encoder (:class: *~nn.Module*): Encoder for", "(or regression if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list", "config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids) head_mask (`torch.FloatTensor` of shape `(num_heads,)` or", "does not expect [CLS] token to be added as it's appended to the", "instead of a plain tuple. 
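    To make the shape conventions above easier to follow, here is a small illustrative sketch; the vocabulary
    size, token ids, and image size are made-up values, not requirements:

    ```python
    import torch

    batch_size, text_len = 2, 16
    input_modal = torch.rand(batch_size, 3, 224, 224)            # whatever shape the modal encoder expects
    input_ids = torch.randint(0, 30522, (batch_size, text_len))  # text tokens only, no [CLS] prepended
    modal_start_tokens = torch.full((batch_size,), 101)          # e.g. BERT's [CLS] id, prepended to the modal embeddings
    modal_end_tokens = torch.full((batch_size,), 102)            # e.g. BERT's [SEP] id, appended to the modal embeddings
    attention_mask = torch.ones(batch_size, text_len)            # covers the text tokens only; the modal part is unmasked automatically
    ```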
\"\"\" @add_start_docstrings( \"The bare MMBT Model outputting raw", "linear layer on top of the pooled output) \"\"\", MMBT_START_DOCSTRING, MMBT_INPUTS_DOCSTRING, ) class", "self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None,", "tensors of all attention layers. See `attentions` under returned tensors for more detail.", "encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None, ): r\"\"\" Returns: Examples:: # For example purposes.", "[What are token type IDs?](../glossary#token-type-ids) modal_token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, modal_sequence_length)`: Segment", "MMBT_INPUTS_DOCSTRING, ) class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for", "`input_ids` you can choose to directly pass an embedded representation. This is useful", "embeddings, pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. Use", "of shape `(batch_size, num_heads, sequence_length, sequence_length)`: Attentions weights after the attention softmax, used", "1]`: - 1 for tokens that are **not masked**, - 0 for tokens", "1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If", "Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask", "of shape `(batch_size, sequence_length)`: Segment token indices to indicate first and second portions", "@add_start_docstrings( \"\"\" MMBT Model with a sequence classification/regression head on top (a linear", "...modeling_utils import ModuleUtilsMixin from ...utils import logging logger = logging.get_logger(__name__) _CONFIG_FOR_DOC = \"MMBTConfig\"", "output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict:", "sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at the output of the last layer", "classification/regression loss. Indices should be in `[0, ..., config.num_labels - 1]`. If `config.num_labels", "to indicate first and second portions of the inputs. Indices are selected in", "self.dropout(pooled_output) logits = self.classifier(pooled_output) loss = None if labels is not None: if", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "to convert `input_ids` indices into associated vectors than the model's internal embedding lookup", "not None else self.config.output_hidden_states ) return_dict = return_dict if return_dict is not None", "[torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 ) extended_attention_mask = self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask)", "indicates the head is **not masked**, - 0 indicates the head is **masked**.", "a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass. 
Use it as a regular PyTorch Module and refer", "= BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal,", "is None: position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids", "token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device) txt_embeddings = self.transformer.embeddings( input_ids=input_ids, position_ids=position_ids,", "ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs = model(input_modal, input_ids, labels=labels) loss, logits", "in the vocabulary. It does not expect [CLS] token to be added as", "input embeddings, pruning heads etc.) This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.", "elif input_ids is not None: input_txt_shape = input_ids.size() elif inputs_embeds is not None:", "plain tuple. \"\"\" @add_start_docstrings( \"The bare MMBT Model outputting raw hidden-states without any", "the shape that the encoder for that type expects. e.g. With an Image", "self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model with a sequence classification/regression head on", "MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self,", "detail. output_hidden_states (`bool`, *optional*): Whether or not to return the hidden states of", "[CLS] token to be added as it's appended to the end of other", "it's appended to the end of other modality embeddings. Indices can be obtained", "various multimodal classification benchmark tasks. This model inherits from [`PreTrainedModel`]. Check the superclass", "sequence classification/regression head on top (a linear layer on top of the pooled", "config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels)", "in the position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`. [What", "self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, )", "or implied. # See the License for the specific language governing permissions and", "`(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at the output of each layer", "other modality embeddings. Indices can be obtained using [`BertTokenizer`]. See [`PreTrainedTokenizer.encode`] and [`PreTrainedTokenizer.__call__`]", "token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not None: start_token_embeds =", "Embedding. [CLS] Most commonly used for classification tasks. modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`,", "0 corresponds to a *sentence A* token, - 1 corresponds to a *sentence", "dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`):", "modality. 
Selected in the range `[0, config.max_position_embeddings - 1]`. [What are position IDs?](../glossary#position-ids)", "# Copyright (c) HuggingFace Inc. team. # # Licensed under the Apache License,", "indicate different portions of the non-text modality. The embeddings from these tokens will", "if config.num_labels==1) scores (before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor`", "to Other Modality Embedding. [SEP] Most commonly used. attention_mask (*optional*) `torch.FloatTensor` of shape", "height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence tokens", "selected heads of the self-attention modules. Mask values selected in `[0, 1]`: -", "all its model (such as downloading or saving, resizing the input embeddings, pruning", "1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids = torch.arange(seq_length,", "the specific language governing permissions and # limitations under the License. \"\"\"PyTorch MMBT", "pass an embedded representation. This is useful if you want more control over", "return ((loss,) + output) if loss is not None else output return SequenceClassifierOutput(", "= self.transformer.encoder( embedding_output, attention_mask=extended_attention_mask, head_mask=head_mask, encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output =", "token indices of the encoder input. This mask is used in the cross-attention", "def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None,", "Inc. team. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "corresponds to a *sentence A* token, - 1 corresponds to a *sentence B*", "to return the attentions tensors of all attention layers. See `attentions` under returned", "for the second modality. It should take in a batch of modal inputs", "\"\"\" @add_start_docstrings( \"The bare MMBT Model outputting raw hidden-states without any specific head", "tasks. This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic", "+ token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING = r\"\"\"", "the vocabulary. It does not expect [CLS] token to be added as it's", "language governing permissions and # limitations under the License. \"\"\"PyTorch MMBT model. \"\"\"", "modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict =", "depending on the configuration (config) and inputs: **loss**: (*optional*, returned when `labels` is", "use this file except in compliance with the License. # You may obtain", "model. 
Initializing with a config file does not load the weights associated with", ") return_dict = return_dict if return_dict is not None else self.config.use_return_dict if input_ids", "torch.cat([start_token_embeds.unsqueeze(1), token_embeddings], dim=1) if end_token is not None: end_token_embeds = self.word_embeddings(end_token) seq_length +=", "= torch.cat( [torch.ones(input_modal_shape, device=device, dtype=torch.long), attention_mask], dim=1 ) if encoder_attention_mask is None: encoder_attention_mask", "nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None,", "shape `(1,)`: Classification (or regression if config.num_labels==1) loss. **logits**: `torch.FloatTensor` of shape `(batch_size,", "of the self-attention modules. Mask values selected in `[0, 1]`: - 1 indicates", "the model's internal embedding lookup matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`,", "a classification loss is computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending on", "associated with the model, only the configuration. transformer (:class: *~nn.Module*): A text transformer", "1 for tokens that are **not masked**, - 0 for tokens that are", "return BaseModelOutputWithPooling( last_hidden_state=sequence_output, pooler_output=pooled_output, hidden_states=encoder_outputs.hidden_states, attentions=encoder_outputs.attentions, ) def get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self,", "embeddings from these tokens will be summed with the respective token embeddings for", "input_shape = embedding_output.size()[:-1] if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask", "seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings", "position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return embeddings MMBT_START_DOCSTRING =", "token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings =", "_CONFIG_FOR_DOC = \"MMBTConfig\" class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which takes in an encoder,", "dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids = torch.zeros(", "token indices to indicate first and second portions of the inputs. 
        encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on the padding token indices of the encoder input. This mask is used
            in the cross-attention if the model is configured as a decoder. Mask values selected in `[0, 1]`:
            - 1 for tokens that are **not masked**,
            - 0 for tokens that are **masked**.
            [What are attention masks?](../glossary#attention-mask)
        token_type_ids (*optional*) `torch.LongTensor` of shape `(batch_size, sequence_length)`:
            Segment token indices to indicate first and second portions of the inputs. Indices are selected in
            `[0, 1]`:
            - 0 corresponds to a *sentence A* token,
            - 1 corresponds to a *sentence B* token.
            [What are token type IDs?](../glossary#token-type-ids)
    encoder (:class: *~nn.Module*): Encoder for the second modality. It should take in a batch of modal inputs
        and return k, n dimension embeddings.
    This model is also a PyTorch [torch.nn.Module](https://pytorch.org/docs/stable/nn.html#torch.nn.Module) subclass.
    Use it as a regular PyTorch Module and refer to the PyTorch documentation for all matter related to general usage
    and behavior.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
        transformer = BertModel.from_pretrained('bert-base-uncased')
        encoder = ImageEncoder(args)
        mmbt = MMBTModel(config, transformer, encoder)
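    The `ImageEncoder` above is only referenced, never defined in this file, which is why the example is marked as
    not runnable. The following is a minimal sketch of what such an encoder could look like, assuming a torchvision
    ResNet-18 trunk and an output of shape `(batch_size, num_embeds, modal_hidden_size)` as expected by
    `ModalEmbeddings`; the class name, `num_embeds`, and `modal_hidden_size` values are illustrative assumptions,
    not part of this module:

    ```python
    from torch import nn
    from torchvision.models import resnet18


    class ImageEncoder(nn.Module):
        """Illustrative encoder: maps an image batch to a short sequence of region embeddings."""

        def __init__(self, num_embeds=3, modal_hidden_size=2048):
            super().__init__()
            backbone = resnet18()
            # Drop the final pooling and classification layers, keep the convolutional trunk.
            self.trunk = nn.Sequential(*list(backbone.children())[:-2])
            # Collapse the spatial feature grid down to `num_embeds` positions.
            self.pool = nn.AdaptiveAvgPool2d((num_embeds, 1))
            self.proj = nn.Linear(512, modal_hidden_size)

        def forward(self, images):
            # images: (batch_size, 3, height, width)
            feats = self.trunk(images)            # (batch_size, 512, h, w)
            feats = self.pool(feats).squeeze(-1)  # (batch_size, 512, num_embeds)
            feats = feats.transpose(1, 2)         # (batch_size, num_embeds, 512)
            return self.proj(feats)               # (batch_size, num_embeds, modal_hidden_size)
    ```

    With this sketch, `config.modal_hidden_size` would need to match `modal_hidden_size` so that
    `ModalEmbeddings.proj_embeddings` receives the dimension it expects.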
\"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of shape", "each input sequence tokens in the position embeddings for the non-text modality. Selected", "returned when `output_attentions=True`) list of `torch.FloatTensor` (one for each layer) of shape `(batch_size,", "= self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs =", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "in an encoder, and a transformer embedding.\"\"\" def __init__(self, config, encoder, embeddings): super().__init__()", "on the configuration (config) and inputs: **loss**: (*optional*, returned when `labels` is provided)", "(`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*): Indices of positions of each input sequence", "encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid performing attention on", "1]`: - 1 indicates the head is **not masked**, - 0 indicates the", "behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with all the parameters of the", "be summed with the respective token embeddings for the non-text modality. position_ids (`torch.LongTensor`", "indicates the head is **masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*):", "transformer embedding.\"\"\" def __init__(self, config, encoder, embeddings): super().__init__() self.config = config self.encoder =", "classification/regression head on top (a linear layer on top of the pooled output)", "compute the weighted average in the self-attention heads. Examples: ```python # For example", "elif inputs_embeds is not None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have to", "the model is configured as a decoder. Mask values selected in `[0, 1]`:", "token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids = torch.arange(seq_length, dtype=torch.long,", "required by applicable law or agreed to in writing, software # distributed under", "the hidden states of all layers. See `hidden_states` under returned tensors for more", "of the encoder input. This mask is used in the cross-attention if the", "documentation for all matter related to general usage and behavior. Parameters: config ([`MMBTConfig`]):", "a sequence classification/regression head on top (a linear layer on top of the", "(:class: *~nn.Module*): A text transformer that is used by MMBT. It should have", "second portions of the inputs. Indices are selected in `[0, 1]`: - 0", "the encoder. 
Used in the cross-attention if the model is configured as a", "modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, encoder_hidden_states=None, encoder_attention_mask=None, output_attentions=None, output_hidden_states=None, return_dict=None,", "of shape `(batch_size,)`, *optional*): Optional start token to be added to Other Modality", "self.config = config self.transformer = transformer self.modal_encoder = ModalEmbeddings(config, encoder, transformer.embeddings) @add_start_docstrings_to_model_forward(MMBT_INPUTS_DOCSTRING) @replace_return_docstrings(output_type=BaseModelOutputWithPooling,", "```python # For example purposes. Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args)", "Not runnable. transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder)", "matrix. encoder_hidden_states (`torch.FloatTensor` of shape `(batch_size, sequence_length, hidden_size)`, *optional*): Sequence of hidden-states at", "as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask to avoid", "is not None else self.config.use_return_dict if input_ids is not None and inputs_embeds is", "*Tuple* comprising various elements depending on the configuration (config) and inputs: **loss**: (*optional*,", "of `torch.FloatTensor` (one for the output of each layer + the output of", "of shape `(batch_size, ***)`): The other modality data. It will be the shape", "and refer to the PyTorch documentation for all matter related to general usage", "proposed in [Supervised Multimodal Bitransformers for Classifying Images and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>,", "IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of each", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "summed with the respective token embeddings for the non-text modality. position_ids (`torch.LongTensor` of", "MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of shape `(batch_size, ***)`): The other modality", "dim=1 ) if encoder_attention_mask is None: encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask =", "text transformer that is used by MMBT. It should have embeddings, encoder, and", "Used in the cross-attention if the model is configured as a decoder. encoder_attention_mask", "not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1)", "encoder_hidden_states=encoder_hidden_states, encoder_attention_mask=encoder_extended_attention_mask, output_attentions=output_attentions, output_hidden_states=output_hidden_states, return_dict=return_dict, ) sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if", ") class MMBTForClassification(nn.Module): r\"\"\" **labels**: (*optional*) `torch.LongTensor` of shape `(batch_size,)`: Labels for computing", "end of other modality embeddings. Indices can be obtained using [`BertTokenizer`]. 
See [`PreTrainedTokenizer.encode`]", "class ModalEmbeddings(nn.Module): \"\"\"Generic Modal Embeddings which takes in an encoder, and a transformer", "for all matter related to general usage and behavior. Parameters: config ([`MMBTConfig`]): Model", "not to return the hidden states of all layers. See `hidden_states` under returned", "(*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for the output of each", "config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout", "the encoder input. This mask is used in the cross-attention if the model", "raise ValueError(\"You have to specify either input_ids or inputs_embeds\") device = input_ids.device if", "= self.mmbt( input_modal=input_modal, input_ids=input_ids, modal_start_tokens=modal_start_tokens, modal_end_tokens=modal_end_tokens, attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds,", "encoder) outputs = model(input_modal, input_ids, labels=labels) loss, logits = outputs[:2] ```\"\"\" def __init__(self,", "None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings", "position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None:", "the output of the last layer of the encoder. Used in the cross-attention", "average in the self-attention heads. Examples: ```python # For example purposes. Not runnable.", "and behavior. Parameters: config ([`MMBTConfig`]): Model configuration class with all the parameters of", "position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None, ): return_dict = return_dict if return_dict is", "self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier =", "if loss is not None else output return SequenceClassifierOutput( loss=loss, logits=logits, hidden_states=outputs.hidden_states, attentions=outputs.attentions,", "return k, n dimension embeddings. \"\"\" MMBT_INPUTS_DOCSTRING = r\"\"\" Args: input_modal (`torch.FloatTensor` of", "position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings + token_type_embeddings", "(*optional*, returned when `output_attentions=True`) list of `torch.FloatTensor` (one for each layer) of shape", "= self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings, end_token_embeds.unsqueeze(1)], dim=1) if position_ids is", "and [`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`,", "the inputs. 
Indices are selected in `[0, 1]`: - 0 corresponds to a", "and other image encoders, and obtain state-of-the-art performance on various multimodal classification benchmark", "encoder_attention_mask = torch.ones(input_shape, device=device) else: encoder_attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device), encoder_attention_mask], dim=1 )", "import nn from torch.nn import CrossEntropyLoss, MSELoss from ...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings", "**masked**. inputs_embeds (`torch.FloatTensor` of shape `(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing", "tokens in the position embeddings. Selected in the range `[0, config.max_position_embeddings - 1]`.", "inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify either input_ids or inputs_embeds\") device =", "the model at the output of each layer plus the initial embedding outputs.", "at the same time\") elif input_ids is not None: input_txt_shape = input_ids.size() elif", "head_mask (`torch.FloatTensor` of shape `(num_heads,)` or `(num_layers, num_heads)`, *optional*): Mask to nullify selected", "__init__(self, config, transformer, encoder): super().__init__() self.config = config self.transformer = transformer self.modal_encoder =", "position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions of", "inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the library", "output of the embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model", "and Text](https://github.com/facebookresearch/mmbt) by <NAME>, <NAME>, <NAME>, <NAME>. It's a supervised multimodal bitransformer model", ") sequence_output = encoder_outputs[0] pooled_output = self.transformer.pooler(sequence_output) if not return_dict: return (sequence_output, pooled_output)", "= token_embeddings + position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings) return", "of shape `(batch_size,)`, *optional*): Optional end token to be added to Other Modality", "channels, height, width) input_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`): Indices of input sequence", "indices of the encoder input. This mask is used in the cross-attention if", "are position IDs?](../glossary#position-ids) modal_position_ids (`torch.LongTensor` of shape `(batch_size, modal_sequence_length)`, *optional*): Indices of positions", "used to compute the weighted average in the self-attention heads. Examples: ```python #", "With an Image Encoder, the shape would be (batch_size, channels, height, width) input_ids", "(one for the output of each layer + the output of the embeddings)", "MMBT. It should have embeddings, encoder, and pooler attributes. encoder (:class: *~nn.Module*): Encoder", "end_token is not None: end_token_embeds = self.word_embeddings(end_token) seq_length += 1 token_embeddings = torch.cat([token_embeddings,", "of shape `(batch_size,)`: Labels for computing the sequence classification/regression loss. Indices should be", "(*optional*) `torch.FloatTensor` of shape `(batch_size, sequence_length)`: Mask to avoid performing attention on padding", "# Copyright (c) Facebook, Inc. and its affiliates. # Copyright (c) HuggingFace Inc.", "***)`): The other modality data. 
It will be the shape that the encoder", "nn.Dropout(p=config.hidden_dropout_prob) def forward(self, input_modal, start_token=None, end_token=None, position_ids=None, token_type_ids=None): token_embeddings = self.proj_embeddings(self.encoder(input_modal)) seq_length =", "<NAME>, <NAME>. It's a supervised multimodal bitransformer model that fuses information from text", "# you may not use this file except in compliance with the License.", "value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model with a sequence classification/regression head", "layer plus the initial embedding outputs. **attentions**: (*optional*, returned when `output_attentions=True`) list of", "if the model is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size,", "get_input_embeddings(self): return self.embeddings.word_embeddings def set_input_embeddings(self, value): self.embeddings.word_embeddings = value @add_start_docstrings( \"\"\" MMBT Model", "that type expects. e.g. With an Image Encoder, the shape would be (batch_size,", "```\"\"\" def __init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config,", "(before SoftMax). **hidden_states**: (*optional*, returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for the", "time\") elif input_ids is not None: input_txt_shape = input_ids.size() elif inputs_embeds is not", "return_dict (`bool`, *optional*): Whether or not to return a [`~file_utils.ModelOutput`] instead of a", "[`PreTrainedModel`]. Check the superclass documentation for the generic methods the library implements for", "on various multimodal classification benchmark tasks. This model inherits from [`PreTrainedModel`]. Check the", "It's a supervised multimodal bitransformer model that fuses information from text and other", "attention_mask=attention_mask, token_type_ids=token_type_ids, modal_token_type_ids=modal_token_type_ids, position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output", "each layer + the output of the embeddings) of shape `(batch_size, sequence_length, hidden_size)`:", "modality. It should take in a batch of modal inputs and return k,", "configuration. transformer (:class: *~nn.Module*): A text transformer that is used by MMBT. It", "both input_ids and inputs_embeds at the same time\") elif input_ids is not None:", "IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional start token to be added", "position embeddings for the non-text modality. Selected in the range `[0, config.max_position_embeddings -", ") input_modal_shape = modal_embeddings.size()[:-1] if token_type_ids is None: token_type_ids = torch.ones(input_txt_shape, dtype=torch.long, device=device)", "end_token_embeds.unsqueeze(1)], dim=1) if position_ids is None: position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids =", "The embeddings from these tokens will be summed with the respective token embeddings", "is useful if you want more control over how to convert `input_ids` indices", "If `config.num_labels > 1` a classification loss is computed (Cross-Entropy). 
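    For a concrete picture of how these arguments fit together, here is a hedged sketch of assembling one image/text
    pair, continuing the `mmbt` and `ImageEncoder` example above; the tokenizer choice, the pre-built `image_batch`
    tensor, and the use of the [CLS]/[SEP] ids as modal start/end tokens are assumptions for illustration:

    ```python
    import torch
    from transformers import BertTokenizer

    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")

    # Text is encoded without [CLS]: the model prepends the modal embeddings (with their own
    # start/end tokens) in front of the text embeddings.
    encoded = tokenizer("a caption describing the image", add_special_tokens=False, return_tensors="pt")

    outputs = mmbt(
        input_modal=image_batch,                                    # e.g. (1, 3, 224, 224) for an image encoder
        input_ids=encoded["input_ids"],                             # (1, text_sequence_length)
        attention_mask=encoded["attention_mask"],                   # text-only mask; modal positions get ones internally
        modal_start_tokens=torch.tensor([tokenizer.cls_token_id]),  # (batch_size,)
        modal_end_tokens=torch.tensor([tokenizer.sep_token_id]),    # (batch_size,)
    )
    sequence_output, pooled_output = outputs[:2]
    ```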
    Returns: *Tuple* comprising various elements depending on the configuration (config) and inputs:
        **loss**: (*optional*, returned when `labels` is provided) `torch.FloatTensor` of shape `(1,)`:
            Classification (or regression if config.num_labels==1) loss.
        **logits**: `torch.FloatTensor` of shape `(batch_size, config.num_labels)`:
            Classification (or regression if config.num_labels==1) scores (before SoftMax).
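    As a usage note, a minimal training step built on this tuple might look like the sketch below; the optimizer
    choice and the pre-built `input_modal`, `input_ids`, and `labels` tensors are assumptions, not part of this
    module:

    ```python
    import torch

    model = MMBTForClassification(config, transformer, encoder)
    optimizer = torch.optim.AdamW(model.parameters(), lr=5e-5)

    model.train()
    outputs = model(input_modal, input_ids, labels=labels)
    loss, logits = outputs[:2]  # loss first, then (batch_size, num_labels) logits, as described above
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()
    ```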
\"\"\" @add_start_docstrings( \"The bare MMBT Model outputting raw hidden-states without", "position_ids=position_ids, modal_position_ids=modal_position_ids, head_mask=head_mask, inputs_embeds=inputs_embeds, return_dict=return_dict, ) pooled_output = outputs[1] pooled_output = self.dropout(pooled_output) logits", "for tokens that are **masked**. [What are attention masks?](../glossary#attention-mask) token_type_ids (*optional*) `torch.LongTensor` of", "...file_utils import add_start_docstrings, add_start_docstrings_to_model_forward, replace_return_docstrings from ...modeling_outputs import BaseModelOutputWithPooling, SequenceClassifierOutput from ...modeling_utils import", "= torch.arange(seq_length, dtype=torch.long, device=input_modal.device) position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length) if token_type_ids is None: token_type_ids", "output of each layer + the output of the embeddings) of shape `(batch_size,", "[`PreTrainedTokenizer.__call__`] for details. [What are input IDs?](../glossary#input-ids) modal_start_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*):", "attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape, device=device,", "*optional*): Mask to nullify selected heads of the self-attention modules. Mask values selected", "= torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids)", "inputs_embeds=inputs_embeds ) embedding_output = torch.cat([modal_embeddings, txt_embeddings], 1) input_shape = embedding_output.size()[:-1] if attention_mask is", "classification loss is computed (Cross-Entropy). Returns: *Tuple* comprising various elements depending on the", "`[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is", "= nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None,", "encoder for that type expects. e.g. With an Image Encoder, the shape would", "of all layers. See `hidden_states` under returned tensors for more detail. return_dict (`bool`,", "input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None, position_ids=None, modal_position_ids=None, head_mask=None, inputs_embeds=None, labels=None, return_dict=None,", "= self.proj_embeddings(self.encoder(input_modal)) seq_length = token_embeddings.size(1) if start_token is not None: start_token_embeds = self.word_embeddings(start_token)", "modality. 
        position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`, *optional*):
            Indices of positions of each input sequence tokens in the position embeddings. Selected in the range
            `[0, config.max_position_embeddings - 1]`.
            [What are position IDs?](../glossary#position-ids)
# See the License for the specific language governing permissions and
# limitations under the License.
    This model inherits from [`PreTrainedModel`]. Check the superclass documentation for the generic methods the
    library implements for all its models (such as downloading or saving, resizing the input embeddings, pruning
    heads etc.)
# You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "device=input_modal.device ) position_embeddings = self.position_embeddings(position_ids) token_type_embeddings = self.token_type_embeddings(token_type_ids) embeddings = token_embeddings + position_embeddings", "if attention_mask is None: attention_mask = torch.ones(input_shape, device=device) else: attention_mask = torch.cat( [torch.ones(input_modal_shape,", "for computing the sequence classification/regression loss. Indices should be in `[0, ..., config.num_labels", "= nn.Linear(config.hidden_size, config.num_labels) def forward( self, input_modal, input_ids=None, modal_start_tokens=None, modal_end_tokens=None, attention_mask=None, token_type_ids=None, modal_token_type_ids=None,", "if token_type_ids is None: token_type_ids = torch.zeros( (input_modal.size(0), seq_length), dtype=torch.long, device=input_modal.device ) position_embeddings", "subclass. Use it as a regular PyTorch Module and refer to the PyTorch", "self.num_labels == 1: # We are doing regression loss_fct = MSELoss() loss =", "be in `[0, ..., config.num_labels - 1]`. If `config.num_labels == 1` a regression", "parameters of the model. Initializing with a config file does not load the", "inputs_embeds is not None: input_txt_shape = inputs_embeds.size()[:-1] else: raise ValueError(\"You have to specify", "of the embeddings) of shape `(batch_size, sequence_length, hidden_size)`: Hidden-states of the model at", "def __init__(self, config, transformer, encoder): super().__init__() self.num_labels = config.num_labels self.mmbt = MMBTModel(config, transformer,", "on padding token indices. Mask values selected in `[0, 1]`: - 1 for", "implements for all its model (such as downloading or saving, resizing the input", "classification tasks. modal_end_tokens (`torch.LongTensor` of shape `(batch_size,)`, *optional*): Optional end token to be", "Copyright (c) Facebook, Inc. and its affiliates. # Copyright (c) HuggingFace Inc. team.", "= MSELoss() loss = loss_fct(logits.view(-1), labels.view(-1)) else: loss_fct = CrossEntropyLoss() loss = loss_fct(logits.view(-1,", "self.get_extended_attention_mask(attention_mask, input_shape, self.device) encoder_extended_attention_mask = self.invert_attention_mask(encoder_attention_mask) head_mask = self.get_head_mask(head_mask, self.config.num_hidden_layers) encoder_outputs = self.transformer.encoder(", "last layer of the encoder. Used in the cross-attention if the model is", "`(batch_size, sequence_length, embedding_dim)`, *optional*): Optionally, instead of passing `input_ids` you can choose to", "tensors for more detail. output_hidden_states (`bool`, *optional*): Whether or not to return the", "specific language governing permissions and # limitations under the License. \"\"\"PyTorch MMBT model.", "Optional start token to be added to Other Modality Embedding. 
[CLS] Most commonly", "embeddings = token_embeddings + position_embeddings + token_type_embeddings embeddings = self.LayerNorm(embeddings) embeddings = self.dropout(embeddings)", "returned when `output_hidden_states=True`) list of `torch.FloatTensor` (one for the output of each layer", "or `(num_layers, num_heads)`, *optional*): Mask to nullify selected heads of the self-attention modules.", "Check the superclass documentation for the generic methods the library implements for all", "self.mmbt = MMBTModel(config, transformer, encoder) self.dropout = nn.Dropout(config.hidden_dropout_prob) self.classifier = nn.Linear(config.hidden_size, config.num_labels) def", "transformer = BertModel.from_pretrained('bert-base-uncased') encoder = ImageEncoder(args) model = MMBTForClassification(config, transformer, encoder) outputs =", "is configured as a decoder. encoder_attention_mask (`torch.FloatTensor` of shape `(batch_size, sequence_length)`, *optional*): Mask", "inputs_embeds is not None: raise ValueError(\"You cannot specify both input_ids and inputs_embeds at", "*optional*): Optional end token to be added to Other Modality Embedding. [SEP] Most", "Parameters: config ([`MMBTConfig`]): Model configuration class with all the parameters of the model.", "modality. The embeddings from these tokens will be summed with the respective token", "have to specify either input_ids or inputs_embeds\") device = input_ids.device if input_ids is", "of `torch.FloatTensor` (one for each layer) of shape `(batch_size, num_heads, sequence_length, sequence_length)`: Attentions", "respective token embeddings for the non-text modality. position_ids (`torch.LongTensor` of shape `(batch_size, sequence_length)`,", "documentation for the generic methods the library implements for all its model (such" ]
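The recoverable logic in this row is the embedding-fusion step: the second-modality encoder's output is projected to the text model's hidden size, given position and token-type embeddings, and then concatenated in front of the text embeddings while the attention mask is padded with ones over the modal positions. Below is a minimal, self-contained sketch of that step under illustrative assumptions; `ToyEncoder`, `ModalEmbeddingsSketch`, and all sizes here are stand-ins, not the reference transformers code.

```python
import torch
import torch.nn as nn


class ToyEncoder(nn.Module):
    """Hypothetical stand-in encoder: (batch, feat) -> (batch, modal_seq, modal_hidden)."""

    def __init__(self, feat: int, modal_hidden: int, modal_seq: int = 3):
        super().__init__()
        self.proj = nn.Linear(feat, modal_hidden)
        self.modal_seq = modal_seq

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return self.proj(x).unsqueeze(1).repeat(1, self.modal_seq, 1)


class ModalEmbeddingsSketch(nn.Module):
    """Project encoder output into the text hidden size and add position/type embeddings."""

    def __init__(self, encoder: nn.Module, modal_hidden: int, hidden: int,
                 max_positions: int = 16, type_vocab: int = 2, dropout: float = 0.1):
        super().__init__()
        self.encoder = encoder
        self.proj_embeddings = nn.Linear(modal_hidden, hidden)
        self.position_embeddings = nn.Embedding(max_positions, hidden)
        self.token_type_embeddings = nn.Embedding(type_vocab, hidden)
        self.LayerNorm = nn.LayerNorm(hidden)
        self.dropout = nn.Dropout(dropout)

    def forward(self, input_modal: torch.Tensor, token_type_ids: torch.Tensor = None) -> torch.Tensor:
        token_embeddings = self.proj_embeddings(self.encoder(input_modal))
        seq_length = token_embeddings.size(1)
        position_ids = torch.arange(seq_length, dtype=torch.long, device=input_modal.device)
        position_ids = position_ids.unsqueeze(0).expand(input_modal.size(0), seq_length)
        if token_type_ids is None:
            token_type_ids = torch.zeros_like(position_ids)
        embeddings = (token_embeddings
                      + self.position_embeddings(position_ids)
                      + self.token_type_embeddings(token_type_ids))
        return self.dropout(self.LayerNorm(embeddings))


hidden = 64
modal_embedder = ModalEmbeddingsSketch(ToyEncoder(feat=32, modal_hidden=48), 48, hidden)

txt_embeddings = torch.randn(4, 8, hidden)           # would come from the text transformer
attention_mask = torch.ones(4, 8, dtype=torch.long)  # text attention mask

modal_embeddings = modal_embedder(torch.randn(4, 32))
# Modal embeddings go in front of the text embeddings; the attention mask is
# extended with ones over the modal positions before the encoder is called.
embedding_output = torch.cat([modal_embeddings, txt_embeddings], dim=1)
attention_mask = torch.cat(
    [torch.ones(modal_embeddings.shape[:2], dtype=torch.long), attention_mask], dim=1
)
```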
[ ") from eth._utils.datatypes import ( Configurable, ) from eth.db.backends.base import ( BaseAtomicDB, )", "validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls) ->", "bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block", "of blocks which were canonical and now are no longer canonical. \"\"\" try:", "#{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for", "Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB,", "\"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given genesis block", "slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self,", ").create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the requested block", "self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), ) return imported_block,", "self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if", "the block hash with the given number in the canonical chain. Raise ``BlockNotFound``", "BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self)", "ValidationError, encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import (", "one or more ``StateMachine`` classes. Each ``StateMachine`` is associated with a range of", "def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block at the canonical chain head.", "if slot >= start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine available for block slot:", "just persists all state. Should design how to clean up the old state.", "a complete block and returns a 3-tuple - the imported block - a", "# type: BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration = None #", "@classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod", "sm_class( chaindb=self.chaindb, block=block, ) # # Block API # def get_block_class(self, block_root: Hash32)", "parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to import block #{}. Cannot", "return block def get_block(self) -> BaseBeaconBlock: \"\"\" Return the current TIP block. \"\"\"", "-> 'BaseBeaconChain': pass # # State Machine API # @classmethod @abstractmethod def get_state_machine_class(", "else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the", "# TODO: Now it just persists all state. Should design how to clean", "self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), )", "canonical head. 
\"\"\" if block is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams())", "from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound,", "block of the canonical head. \"\"\" if block is None: head = self.get_canonical_head()", "def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) -> int: pass", "block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32)", "chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block:", "matching black hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\"", "delegating operations to the appropriate StateMachine depending on the current block slot number.", "instantiated with an empty `sm_configuration`\" ) else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration)", "type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type: int # # Helpers", "= self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root),", "available for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return", "sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough", "'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db)", "chain head. Raise ``CanonicalHeadNotFound`` if there's no head defined for the canonical chain.", "and returns a 3-tuple - the imported block - a tuple of blocks", "the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists", "of blocks which are now part of the canonical chain. 
- a tuple", "pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) ->", "validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod def", "-> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod", "( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import", "# type: int # # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]:", "BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a", "def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the", "if not self.sm_configuration: raise ValueError( \"The Chain class cannot be instantiated with an", "Chain class acts as a wrapper around these other StateMachine classes, delegating operations", "there is no matching black hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None)", "StateMachines in sm_configuration\") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if slot >= start_slot:", "``BlockNotFound`` if there's no block with the given hash in the db. \"\"\"", "def get_block(self) -> BaseBeaconBlock: \"\"\" Return the current TIP block. \"\"\" return self.get_state_machine().block", "abstractmethod, ) import logging from typing import ( TYPE_CHECKING, Tuple, Type, ) from", "tuple of blocks which are now part of the canonical chain. - a", "the canonical head. 
\"\"\" if block is None: head = self.get_canonical_head() return self.create_block_from_parent(head,", "def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass", "self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block.", "before importing \" \"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) )", "-> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass", "def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None)", "imported block - a tuple of blocks which are now part of the", "get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod", "cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize", "Raise ``BlockNotFound`` if there's no block with the given number in the canonical", "self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass", "genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine API #", "chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]],", "def get_score(self, block_root: Hash32) -> int: \"\"\" Return the score of the block", "BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) -> int: pass @abstractmethod def ensure_block(self,", "-> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for the given block slot number.", "``StateMachine`` instance for the given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def", "get_score(self, block_root: Hash32) -> int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock:", "number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self,", "type: Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None", "def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass", "returns a 3-tuple - the imported block - a tuple of blocks which", "type: BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration = None # type:", "in the canonical chain. Raise ``BlockNotFound`` if there's no block with the given", "defined for the canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return", "Slot) -> BaseBeaconBlock: \"\"\" Return the block with the given number in the", "the current block slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB #", "of slots. 
The Chain class acts as a wrapper around these other StateMachine", "def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot) ->", "cls.chaindb_class # # Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState,", "# # Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState,", "= None # type: int # # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls)", "class cannot be instantiated with an empty `sm_configuration`\" ) else: # TODO implment", "None: raise AttributeError(\"Chain classes must define the StateMachines in sm_configuration\") validate_slot(slot) for start_slot,", "block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block)", "# noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The base class for", "except BlockNotFound: raise ValidationError( \"Attempt to import block #{}. Cannot import block {}", "is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\"", "-> BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine`` class of the block descending", "block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def", "is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self)", "self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) # # Block API", "import ( validate_word, ) from eth_typing import ( Hash32, ) from eth_utils import", "db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self)", "state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s", "there's no block with the given hash in the db. \"\"\" validate_word(block_root, title=\"Block", "# @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block:", "logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB)", "return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return the block with", "\"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to import block", "TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers #", "the given hash. Raise ``BlockNotFound`` if there is no matching black hash. 
\"\"\"", "-> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for the given block slot number.", "from typing import ( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes import ( Configurable,", "Hash32: pass @abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock,", "return cls.chaindb_class # # Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state:", ") from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState, )", "import ( FromBlockParams, Slot, ) from eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING:", "self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the block hash with", "int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self)", "instance for the given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot(", "\"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent(", "from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, )", "BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError( \"The Chain class cannot be", "raise BlockClassError( \"Given genesis block class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) )", "Returns the ``StateMachine`` instance for the given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot)", "self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int: \"\"\" Return the", "\"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) #", "canonical chain. 
Raise ``BlockNotFound`` if there's no block with the given number in", "block=block, ) # # Block API # def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]:", "int # # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass #", "BaseBeaconChain(Configurable, ABC): \"\"\" The base class for all BeaconChain objects \"\"\" chaindb =", "cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given genesis block class: {}, StateMachine.block_class:", "BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod", "objects \"\"\" chaindb = None # type: BaseBeaconChainDB chaindb_class = None # type:", "-> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot:", "Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod", "= self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported", "return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block`` if it", "import ( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The base", "the canonical chain. Raise ``BlockNotFound`` if there's no block with the given number", "self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block`` if it is", "ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod", "Raise ``CanonicalHeadNotFound`` if there's no head defined for the canonical chain. \"\"\" block_root", "genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # # StateMachine", "= self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) # # Block API # def", "get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod", "BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration:", "# # State Machine API # @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock)", "-> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the given block number. \"\"\"", "None # type: Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id", "-> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass", "of the canonical head. \"\"\" if block is None: head = self.get_canonical_head() return", "parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. 
if", "pass @abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock,", "self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...],", "the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def", "is None: raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # # Chain API #", "= sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\"", "for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the", "...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and returns a 3-tuple -", ") from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError,", "raise StateMachineNotFound(\"No StateMachine available for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) ->", ") import logging from typing import ( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes", "= BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if not", "genesis block class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db)", "\"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # # StateMachine API #", "block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) # #", "Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and returns a 3-tuple", "Cannot import block {} before importing \" \"its parent block at {}\".format( block.slot,", "# @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':", "self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block at", "logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None:", "head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) -> BaseBeaconBlock:", "= self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) ->", "def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params:", "Return the block hash with the given number in the canonical chain. 
Raise", "cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # # Chain API", "block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the requested block as specified by block", "the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK:", "if it is not ``None``, otherwise return the block of the canonical head.", "= self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int: \"\"\" Return", "( validate_word, ) from eth_typing import ( Hash32, ) from eth_utils import (", "sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls,", "is associated with a range of slots. The Chain class acts as a", "from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State", "The base class for all BeaconChain objects \"\"\" chaindb = None # type:", "Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock:", "block - a tuple of blocks which are now part of the canonical", "{}\".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod", "slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']:", "hash with the given number in the canonical chain. Raise ``BlockNotFound`` if there's", "cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine``", "or more ``StateMachine`` classes. Each ``StateMachine`` is associated with a range of slots.", "ABC): \"\"\" The base class for all BeaconChain objects \"\"\" chaindb = None", "block number. \"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block,", "@abstractmethod def get_score(self, block_root: Hash32) -> int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None)", "get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot) -> Hash32:", "are no longer canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError(", "pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # #", "return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the block hash", "block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class", "range of slots. 
The Chain class acts as a wrapper around these other", "Chain class cannot be instantiated with an empty `sm_configuration`\" ) else: # TODO", "pass # # State Machine API # @classmethod @abstractmethod def get_state_machine_class( cls, block:", "get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot)", "block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) ->", "ValueError( \"The Chain class cannot be instantiated with an empty `sm_configuration`\" ) else:", "cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine':", "import ( Configurable, ) from eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import", "given hash in the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return", "block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock:", "an empty `sm_configuration`\" ) else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb", "at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the given block", "def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper to the", "block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the", "``BeaconChain`` from a genesis state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class:", "API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine``", "eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from", "None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type: int #", "noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The base class for all", "not ``None``, otherwise return the block of the canonical head. \"\"\" if block", "canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\"", "from abc import ( ABC, abstractmethod, ) import logging from typing import (", "validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the block", "at the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's no head defined for", "-> BaseBeaconBlock: \"\"\" Return ``block`` if it is not ``None``, otherwise return the", "Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a", "with the given hash in the db. 
\"\"\" validate_word(block_root, title=\"Block Hash\") block_class =", "( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states import", "genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis state. \"\"\"", ") ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db:", "{}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state,", "state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given genesis", "BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine API", "acts as a wrapper around these other StateMachine classes, delegating operations to the", "block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for the given block", "\"\"\" Return the block at the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's", "# # Block API # def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot =", "score of the block with the given hash. Raise ``BlockNotFound`` if there is", "cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for the given", "with the given number in the canonical chain. Raise ``BlockNotFound`` if there's no", "Hash32) -> int: \"\"\" Return the score of the block with the given", "Slot) -> Hash32: \"\"\" Return the block hash with the given number in", "# type: Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id =", "def get_score(self, block_root: Hash32) -> int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) ->", "@classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block", "Return the current TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) ->", "-> Hash32: \"\"\" Return the block hash with the given number in the", "self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\"", "Block API # def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class", "( BlockNotFound, ) from eth.validation import ( validate_word, ) from eth_typing import (", "at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) ->", "BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis state.", "return the block of the canonical head. \"\"\" if block is None: head", "of the canonical chain. 
- a tuple of blocks which were canonical and", "Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot) -> Hash32: pass @abstractmethod", "else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # #", "block class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state)", "a genesis state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError(", "``StateMachine`` instance for the given block number. \"\"\" block = self.ensure_block(at_block) sm_class =", "@abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock:", "-> BaseBeaconBlock: \"\"\" Return the block at the canonical chain head. Raise ``CanonicalHeadNotFound``", "pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) ->", "as a wrapper around these other StateMachine classes, delegating operations to the appropriate", "how to clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) =", "get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block API # @abstractmethod", "chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) # # StateMachine API # @classmethod def get_state_machine_class(cls, block:", "state. Should design how to clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks,", "base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the", "old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s',", "from eth.exceptions import ( BlockNotFound, ) from eth.validation import ( validate_word, ) from", "given number in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot:", "BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, ) from eth.validation import ( validate_word,", "StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block)", "slot >= start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine available for block slot: #{0}\".format(slot))", "= self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO:", "# Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock)", "which were canonical and now are no longer canonical. \"\"\" try: parent_block =", "-> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) -> int: pass @abstractmethod def", "up the old state. 
self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug(", "the given number in the canonical chain. \"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self,", "BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for the given block slot", "tuple of blocks which were canonical and now are no longer canonical. \"\"\"", "block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists all state.", "all BeaconChain objects \"\"\" chaindb = None # type: BaseBeaconChainDB chaindb_class = None", "BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the genesis block. \"\"\" chaindb", "block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def", "AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # # Chain API # @classmethod def from_genesis(cls,", "# validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls)", "from eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, ) from", "head. Raise ``CanonicalHeadNotFound`` if there's no head defined for the canonical chain. \"\"\"", "block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import", "return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block at the", "``None``, otherwise return the block of the canonical head. \"\"\" if block is", ") = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot,", "must define the StateMachines in sm_configuration\") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if", "implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers # @classmethod", "bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A", "chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self,", "Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type: int # # Helpers #", "raise ValidationError( \"Attempt to import block #{}. Cannot import block {} before importing", "BlockNotFound: raise ValidationError( \"Attempt to import block #{}. 
Cannot import block {} before", "block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock:", "return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the", "block is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def", "self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, ) # # Block API # def get_block_class(self,", "import block #{}. Cannot import block {} before importing \" \"its parent block", "the canonical chain. - a tuple of blocks which were canonical and now", "blocks which were canonical and now are no longer canonical. \"\"\" try: parent_block", "pass # # Block API # @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]:", "other StateMachine classes, delegating operations to the appropriate StateMachine depending on the current", "combination of one or more ``StateMachine`` classes. Each ``StateMachine`` is associated with a", "# # Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block:", "@abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self, block_root: Hash32) -> int:", "the ``StateMachine`` class of the block descending from the given block. \"\"\" return", "@abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls,", "all state. Should design how to clean up the old state. self.chaindb.persist_state(state) (", "genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the genesis block. \"\"\"", "@abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass #", "eth_typing import ( Hash32, ) from eth_utils import ( ValidationError, encode_hex, ) from", "# # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # #", "for start_slot, sm_class in reversed(cls.sm_configuration): if slot >= start_slot: return sm_class raise StateMachineNotFound(\"No", "None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return", "import ( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import", "@classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for", "Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) ->", "= None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type: int", "pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot(", "if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root: Hash32) ->", "old state. 
self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot", "BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot)", "with the given number in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def", ") -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and", "def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root:", "-> BaseBeaconBlock: \"\"\" Return the block with the given number in the canonical", "the requested block as specified by block hash. Raise ``BlockNotFound`` if there's no", "-> Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db:", ") from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import ( FromBlockParams, Slot,", "# @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def", "# # StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\"", "( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The base class", "in sm_configuration\") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if slot >= start_slot: return", "genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine API # @classmethod @abstractmethod", "import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states", "= self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams)", "BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import (", "Block API # @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def", "block slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB]", "BeaconState, ) from eth2.beacon.typing import ( FromBlockParams, Slot, ) from eth2.beacon.validation import (", "BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the given block number.", "def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the block hash with the", "from eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged, )", "on the current block slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB", "the ``StateMachine`` instance for the given block slot number. 
\"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod", "the imported block - a tuple of blocks which are now part of", "a tuple of blocks which were canonical and now are no longer canonical.", "Each ``StateMachine`` is associated with a range of slots. The Chain class acts", "from eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( #", "BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain):", "sm_class raise StateMachineNotFound(\"No StateMachine available for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None)", "no matching black hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock:", "the current TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock:", "Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a combination", "slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for the given block", "BeaconChain objects \"\"\" chaindb = None # type: BaseBeaconChainDB chaindb_class = None #", "\"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) ->", "more ``StateMachine`` classes. Each ``StateMachine`` is associated with a range of slots. The", "the given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot:", "``block`` if it is not ``None``, otherwise return the block of the canonical", "\"\"\" Return the current TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot)", "\"\"\" Return the requested block as specified by block hash. Raise ``BlockNotFound`` if", "self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) -> BaseBeaconBlock: \"\"\" Return", "Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and returns a 3-tuple - the", "from the genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db) #", "slot number. \"\"\" if cls.sm_configuration is None: raise AttributeError(\"Chain classes must define the", "class for the given block slot number. \"\"\" if cls.sm_configuration is None: raise", "...] chain_id = None # type: int # # Helpers # @classmethod @abstractmethod", "- a tuple of blocks which are now part of the canonical chain.", "return cls(base_db) # # StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) ->", "block) # TODO: Now it just persists all state. 
Should design how to", "chaindb = None # type: BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB] sm_configuration", "these other StateMachine classes, delegating operations to the appropriate StateMachine depending on the", "'BaseBeaconChain': pass # # State Machine API # @classmethod @abstractmethod def get_state_machine_class( cls,", "block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int: \"\"\"", "( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine,", "slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self,", "Hash32) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_head(self) -> BaseBeaconBlock: pass @abstractmethod def get_score(self,", "in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) ->", "# Block API # def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root)", "def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass", "int: \"\"\" Return the score of the block with the given hash. Raise", "eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from", "no block with the given number in the canonical chain. \"\"\" validate_slot(slot) return", "genesis_block.__class__) return cls(base_db) # # StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock)", "genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the", "are now part of the canonical chain. - a tuple of blocks which", "helper to the ``StateMachine`` class of the block descending from the given block.", "hash in the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root,", "eth2.beacon.typing import ( FromBlockParams, Slot, ) from eth2.beacon.validation import ( validate_slot, ) if", "@abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock:", "is None: raise AttributeError(\"Chain classes must define the StateMachines in sm_configuration\") validate_slot(slot) for", "= self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block", "ValidationError( \"Attempt to import block #{}. Cannot import block {} before importing \"", "the block at the canonical chain head. 
Raise ``CanonicalHeadNotFound`` if there's no head", "pass # # Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state:", "chain_id = None # type: int # # Helpers # @classmethod @abstractmethod def", "from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import ( FromBlockParams, Slot, )", "eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from", "type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError(", ") from eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged,", "\"\"\" Passthrough helper to the ``StateMachine`` class of the block descending from the", "BaseBeaconBlock: \"\"\" Return the block at the canonical chain head. Raise ``CanonicalHeadNotFound`` if", "BaseBeaconBlock: \"\"\" Return the current TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot:", "chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return", "the given hash in the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root)", "-> BaseBeaconBlock: \"\"\" Return the current TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self,", "StateMachine classes, delegating operations to the appropriate StateMachine depending on the current block", "classes, delegating operations to the appropriate StateMachine depending on the current block slot", ") class BaseBeaconChain(Configurable, ABC): \"\"\" The base class for all BeaconChain objects \"\"\"", "@classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class", "the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's no head defined for the", "None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) ->", "Return the ``StateMachine`` class for the given block slot number. \"\"\" if cls.sm_configuration", "block hash with the given number in the canonical chain. Raise ``BlockNotFound`` if", "eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The", "import ( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, ) from eth.validation import", "slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), ) return imported_block, new_canonical_blocks, old_canonical_blocks", "\"Attempt to import block #{}. Cannot import block {} before importing \" \"its", "Return the requested block as specified by block hash. Raise ``BlockNotFound`` if there's", "self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) ->", "a wrapper around these other StateMachine classes, delegating operations to the appropriate StateMachine", "# State Machine API # @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) ->", "the ``BeaconChain`` from a genesis state. 
\"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) !=", "the ``StateMachine`` instance for the given block number. \"\"\" block = self.ensure_block(at_block) sm_class", "the block of the canonical head. \"\"\" if block is None: head =", "``StateMachine`` classes. Each ``StateMachine`` is associated with a range of slots. The Chain", "given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None else", "self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s |", "the block descending from the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1", "block with the given hash. Raise ``BlockNotFound`` if there is no matching black", "None # type: int # # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) ->", "def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from", "eth.validation import ( validate_word, ) from eth_typing import ( Hash32, ) from eth_utils", "-> None: if not self.sm_configuration: raise ValueError( \"The Chain class cannot be instantiated", "of the block descending from the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot +", "at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), )", "block with the given hash in the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class", "design how to clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, )", "'IMPORTED_BLOCK: slot %s | signed root %s', imported_block.slot, encode_hex(imported_block.signed_root), ) return imported_block, new_canonical_blocks,", "state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block)", "block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine``", "@abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock,", "= cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock)", "get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the given", "chaindb=self.chaindb, block=block, ) # # Block API # def get_block_class(self, block_root: Hash32) ->", "``BlockNotFound`` if there is no matching black hash. 
\"\"\" return self.chaindb.get_score(block_root) def ensure_block(self,", "title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\"", "slot: Slot) -> BaseBeaconBlock: \"\"\" Return the block with the given number in", "def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not set\") return", "BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\" The base class for all BeaconChain objects", "-> int: \"\"\" Return the score of the block with the given hash.", "perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists all state. Should design", "\"\"\" chaindb = None # type: BaseBeaconChainDB chaindb_class = None # type: Type[BaseBeaconChainDB]", "``StateMachine`` class of the block descending from the given block. \"\"\" return self.get_state_machine_class_for_block_slot(", "@abstractmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block API", "operations to the appropriate StateMachine depending on the current block slot number. \"\"\"", "block_root: Hash32) -> int: \"\"\" Return the score of the block with the", "if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # # Chain", "Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API", "Tuple, Type, ) from eth._utils.datatypes import ( Configurable, ) from eth.db.backends.base import (", "Type[BaseBeaconChainDB] sm_configuration = None # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None #", "# @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance", "sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params:", "3-tuple - the imported block - a tuple of blocks which are now", "BaseBeaconBlock: \"\"\" Return the requested block as specified by block hash. Raise ``BlockNotFound``", "TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC):", "of the block with the given hash. Raise ``BlockNotFound`` if there is no", "imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists all", "self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def", "( Hash32, ) from eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz import", "``BlockNotFound`` if there's no block with the given number in the canonical chain.", "def __init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError( \"The Chain", "block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass", "canonical chain. 
\"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def", "BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import", "with the given hash. Raise ``BlockNotFound`` if there is no matching black hash.", "if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable,", "BaseBeaconBlock: \"\"\" Return the block with the given number in the canonical chain.", "to import block #{}. Cannot import block {} before importing \" \"its parent", "eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa:", "else: return block def get_block(self) -> BaseBeaconBlock: \"\"\" Return the current TIP block.", "Should design how to clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks,", "BlockClassError( \"Given genesis block class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb", "perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\"", "\"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return the block", "importing \" \"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import", ") base_block_for_import = self.create_block_from_parent( parent_block, FromBlockParams(), ) state, imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate", "number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\"", "if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists all state. Should", "slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance", "the ``BeaconChain`` from the genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return", "as specified by block hash. 
Raise ``BlockNotFound`` if there's no block with the", "Passthrough helper to the ``StateMachine`` class of the block descending from the given", "\"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block,", "block_params) def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the requested block as", "\"\"\" if block is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else: return", "# type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise", "def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the", "Slot) -> Hash32: pass @abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True )", "Machine API # @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass", "def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # #", "parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) ->", "Return the ``StateMachine`` instance for the given block number. \"\"\" block = self.ensure_block(at_block)", "self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return the block with the", "= logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) ->", "\"\"\" Initialize the ``BeaconChain`` from a genesis state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if", "eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import ( FromBlockParams, Slot, ) from", "encode_hex, ) from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB,", "@abstractmethod def get_canonical_block_root(self, slot: Slot) -> Hash32: pass @abstractmethod def import_block( self, block:", "BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block`` if it is not ``None``, otherwise return", "of one or more ``StateMachine`` classes. Each ``StateMachine`` is associated with a range", "Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return", "Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def", "complete block and returns a 3-tuple - the imported block - a tuple", "block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass", "by block hash. 
Raise ``BlockNotFound`` if there's no block with the given hash", "-> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod def", "-> Type['BaseBeaconStateMachine']: pass # # Block API # @abstractmethod def get_block_class(self, block_root: Hash32)", "# @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\"", "\"\"\" Return the block hash with the given number in the canonical chain.", "were canonical and now are no longer canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root)", "it just persists all state. Should design how to clean up the old", "# Validate the imported block. if perform_validation: validate_imported_block_unchanged(imported_block, block) # TODO: Now it", "@abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...],", "import ( Hash32, ) from eth_utils import ( ValidationError, encode_hex, ) from eth2._utils.ssz", "+ 1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params) def get_block_by_root(self, block_root:", "``BeaconChain`` from the genesis block. \"\"\" chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_block(genesis_block, genesis_block.__class__) return cls(base_db)", "class acts as a wrapper around these other StateMachine classes, delegating operations to", "self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is", "head defined for the canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root)", "slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block API # @abstractmethod def get_block_class(self,", "given block slot number. \"\"\" if cls.sm_configuration is None: raise AttributeError(\"Chain classes must", "base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from", "\"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root:", "is not ``None``, otherwise return the block of the canonical head. \"\"\" if", "BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self,", "given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls, slot: Slot)", "if there's no block with the given hash in the db. \"\"\" validate_word(block_root,", "it is not ``None``, otherwise return the block of the canonical head. \"\"\"", "Hash32) -> int: pass @abstractmethod def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: pass @abstractmethod", "from the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is", "genesis state. 
\"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given", "\" \"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root, ) ) base_block_for_import =", "create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32)", "# def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot)", "validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, )", "given hash. Raise ``BlockNotFound`` if there is no matching black hash. \"\"\" return", "ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return ``block`` if it is not ``None``,", "class: {}, StateMachine.block_class: {}\".format( type(genesis_block), sm_class.block_class ) ) chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return", "for the given block number. \"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return", "to the ``StateMachine`` class of the block descending from the given block. \"\"\"", "Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for the given block slot number. \"\"\"", "otherwise return the block of the canonical head. \"\"\" if block is None:", "def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock,", "StateMachine available for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': \"\"\"", "return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot, ).create_block_from_parent(parent_block, block_params)", "API # @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod", "base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError( \"The Chain class cannot", "None: if not self.sm_configuration: raise ValueError( \"The Chain class cannot be instantiated with", "from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain``", "block hash. Raise ``BlockNotFound`` if there's no block with the given hash in", "black hash. 
\"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock: \"\"\" Return", "StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState,", "Slot, ) from eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import", "return self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) -> BaseBeaconBlock: \"\"\" Return the", "# StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns", "# TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db) # # Helpers", "slot: Slot) -> Hash32: \"\"\" Return the block hash with the given number", "\"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db:", "API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) ->", "BlockNotFound, ) from eth.validation import ( validate_word, ) from eth_typing import ( Hash32,", "to clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block,", "<filename>eth2/beacon/chains/base.py from abc import ( ABC, abstractmethod, ) import logging from typing import", "\"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the", "eth2.beacon.types.blocks import ( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing", "abc import ( ABC, abstractmethod, ) import logging from typing import ( TYPE_CHECKING,", "( Configurable, ) from eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import (", "the given number in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self,", "Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and returns a", "get_canonical_block_root(self, slot: Slot) -> Hash32: \"\"\" Return the block hash with the given", "def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: \"\"\" Returns the ``StateMachine`` instance for the", "block with the given number in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot))", ") from eth2.beacon.typing import ( FromBlockParams, Slot, ) from eth2.beacon.validation import ( validate_slot,", "get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams)", "StateMachine depending on the current block slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class", "chain. Raise ``BlockNotFound`` if there's no block with the given number in the", "( BaseBeaconBlock, ) from eth2.beacon.types.states import ( BeaconState, ) from eth2.beacon.typing import (", "block with the given number in the canonical chain. 
\"\"\" return self.chaindb.get_canonical_block_root(slot) def", "clean up the old state. self.chaindb.persist_state(state) ( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__)", "( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks", "pass @abstractmethod def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def", "...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a combination of one or", "BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a", "@classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass", "the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None", "-> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class", "Tuple[BaseBeaconBlock, ...]]: pass class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a combination of one", "number in the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot)", "validate_imported_block_unchanged(imported_block, block) # TODO: Now it just persists all state. Should design how", ") from eth2.beacon.validation import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import (", "self.create_block_from_parent(head, FromBlockParams()) else: return block def get_block(self) -> BaseBeaconBlock: \"\"\" Return the current", "block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return the", ") if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, ) class", "-> Hash32: pass @abstractmethod def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) ->", "associated with a range of slots. The Chain class acts as a wrapper", "get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return the block with the given number", "the canonical chain. \"\"\" validate_slot(slot) return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot)) def get_canonical_block_root(self, slot: Slot) -> Hash32:", "requested block as specified by block hash. Raise ``BlockNotFound`` if there's no block", "block_root: Hash32) -> Type[BaseBeaconBlock]: slot = self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class", "get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class", "type: int # # Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass", "Return the block at the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's no", "= self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to import block #{}. 
Cannot import", "BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine API # @classmethod", "block at the canonical chain head. Raise ``CanonicalHeadNotFound`` if there's no head defined", "start_slot, sm_class in reversed(cls.sm_configuration): if slot >= start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine", "the block with the given number in the canonical chain. Raise ``BlockNotFound`` if", "parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine`` class", "State Machine API # @classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']:", "@classmethod @abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self,", "BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self,", "BaseBeaconBlock) -> 'BaseBeaconChain': pass # # State Machine API # @classmethod @abstractmethod def", "import ( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes import ( Configurable, ) from", "eth.exceptions import ( BlockNotFound, ) from eth.validation import ( validate_word, ) from eth_typing", "not self.sm_configuration: raise ValueError( \"The Chain class cannot be instantiated with an empty", "API # @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod def create_block_from_parent(self,", "eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, )", "`sm_configuration`\" ) else: # TODO implment validate_sm_configuration(self.sm_configuration) # validate_sm_configuration(self.sm_configuration) pass self.chaindb = self.get_chaindb_class()(base_db)", "-> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis state. \"\"\" sm_class =", "canonical chain. - a tuple of blocks which were canonical and now are", "import block {} before importing \" \"its parent block at {}\".format( block.slot, block.signed_root,", "now part of the canonical chain. - a tuple of blocks which were", "BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the genesis block.", "block as specified by block hash. Raise ``BlockNotFound`` if there's no block with", "which are now part of the canonical chain. - a tuple of blocks", "# # Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None:", "BaseBeaconBlock: \"\"\" Return ``block`` if it is not ``None``, otherwise return the block", "block with the given number in the canonical chain. Raise ``BlockNotFound`` if there's", "for the given block slot number. \"\"\" return cls.get_state_machine_class_for_block_slot(block.slot) @classmethod def get_state_machine_class_for_block_slot( cls,", "and now are no longer canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound:", "set\") return cls.chaindb_class # # Chain API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB,", "with a range of slots. 
The Chain class acts as a wrapper around", "return block_class def create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper", "return sm_class( chaindb=self.chaindb, block=block, ) # # Block API # def get_block_class(self, block_root:", "block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block at the canonical chain", "canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to import", "{} before importing \" \"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root, )", "number. \"\"\" if cls.sm_configuration is None: raise AttributeError(\"Chain classes must define the StateMachines", "\"\"\" The base class for all BeaconChain objects \"\"\" chaindb = None #", "( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, ) from eth.validation import (", ") from eth_typing import ( Hash32, ) from eth_utils import ( ValidationError, encode_hex,", "genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis", "``CanonicalHeadNotFound`` if there's no head defined for the canonical chain. \"\"\" block_root =", "-> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]: \"\"\" Import a complete block and returns", "= cls.get_state_machine_class_for_block_slot(genesis_block.slot) if type(genesis_block) != sm_class.block_class: raise BlockClassError( \"Given genesis block class: {},", "slot number. \"\"\" logger = logging.getLogger(\"eth2.beacon.chains.BeaconChain\") chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def", "def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for", "BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine`` class of", "descending from the given block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot", "Import a complete block and returns a 3-tuple - the imported block -", "chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':", "@abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod @abstractmethod", "-> BaseBeaconBlock: \"\"\" Return the requested block as specified by block hash. Raise", ") from eth.validation import ( validate_word, ) from eth_typing import ( Hash32, )", "from eth.validation import ( validate_word, ) from eth_typing import ( Hash32, ) from", "the given block number. \"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class(", "no head defined for the canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class =", "@abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot: Slot)", "Return the score of the block with the given hash. Raise ``BlockNotFound`` if", "canonical chain. 
\"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True )", "block and returns a 3-tuple - the imported block - a tuple of", "def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def get_state_machine_class_for_block_slot( cls, slot:", "BaseBeaconBlock) -> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis state. \"\"\" sm_class", "BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine`` class of the block descending from", "logging from typing import ( TYPE_CHECKING, Tuple, Type, ) from eth._utils.datatypes import (", "raise AttributeError(\"`chaindb_class` not set\") return cls.chaindb_class # # Chain API # @classmethod def", "the StateMachines in sm_configuration\") validate_slot(slot) for start_slot, sm_class in reversed(cls.sm_configuration): if slot >=", "- the imported block - a tuple of blocks which are now part", "from eth2.beacon.state_machines.base import ( # noqa: F401 BaseBeaconStateMachine, ) class BaseBeaconChain(Configurable, ABC): \"\"\"", "self.sm_configuration: raise ValueError( \"The Chain class cannot be instantiated with an empty `sm_configuration`\"", "block slot number. \"\"\" if cls.sm_configuration is None: raise AttributeError(\"Chain classes must define", "def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API # @classmethod @abstractmethod def", "\"\"\" Return the score of the block with the given hash. Raise ``BlockNotFound``", "# Block API # @abstractmethod def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]: pass @abstractmethod", "the canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class)", "Type['BaseBeaconStateMachine']: pass # # Block API # @abstractmethod def get_block_class(self, block_root: Hash32) ->", "now are no longer canonical. \"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise", "( new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed", ") chaindb = cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB,", "get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: \"\"\" Return the ``StateMachine`` class for the", "self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt to import block #{}. Cannot import block", "import ( BaseBeaconChainDB, BeaconChainDB, ) from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from", "chaindb_class = BeaconChainDB # type: Type[BaseBeaconChainDB] def __init__(self, base_db: BaseAtomicDB) -> None: if", "def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: \"\"\" Return the requested block as specified", "in the db. \"\"\" validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class)", "number. 
\"\"\" block = self.ensure_block(at_block) sm_class = self.get_state_machine_class_for_block_slot(block.slot) return sm_class( chaindb=self.chaindb, block=block, )", "around these other StateMachine classes, delegating operations to the appropriate StateMachine depending on", "block {} before importing \" \"its parent block at {}\".format( block.slot, block.signed_root, block.previous_block_root,", "\"\"\" Return the block with the given number in the canonical chain. Raise", "TIP block. \"\"\" return self.get_state_machine().block def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: \"\"\" Return", "canonical chain head. Raise ``CanonicalHeadNotFound`` if there's no head defined for the canonical", "...]]: \"\"\" Import a complete block and returns a 3-tuple - the imported", "# @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain API #", "Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class is None: raise AttributeError(\"`chaindb_class`", "-> 'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from the genesis block. \"\"\" chaindb =", "from eth_typing import ( Hash32, ) from eth_utils import ( ValidationError, encode_hex, )", "BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock:", "create_block_from_parent(self, parent_block: BaseBeaconBlock, block_params: FromBlockParams) -> BaseBeaconBlock: \"\"\" Passthrough helper to the ``StateMachine``", "class BeaconChain(BaseBeaconChain): \"\"\" A Chain is a combination of one or more ``StateMachine``", "if there's no head defined for the canonical chain. \"\"\" block_root = self.chaindb.get_canonical_head_root()", "reversed(cls.sm_configuration): if slot >= start_slot: return sm_class raise StateMachineNotFound(\"No StateMachine available for block", "# type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...] chain_id = None # type: int # #", "import ( validate_slot, ) if TYPE_CHECKING: from eth2.beacon.state_machines.base import ( # noqa: F401", "cls.get_chaindb_class()(db=base_db) chaindb.persist_state(genesis_state) return cls._from_genesis_block(base_db, genesis_block) @classmethod def _from_genesis_block(cls, base_db: BaseAtomicDB, genesis_block: BaseBeaconBlock) ->", "self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int: \"\"\" Return the score of", "if there is no matching black hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block:", "ABC, abstractmethod, ) import logging from typing import ( TYPE_CHECKING, Tuple, Type, )", "def get_state_machine_class_for_block_slot( cls, slot: Slot) -> Type['BaseBeaconStateMachine']: pass # # Block API #", "the appropriate StateMachine depending on the current block slot number. \"\"\" logger =", "Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine': pass @classmethod @abstractmethod def", "\"\"\" Return the ``StateMachine`` instance for the given block number. \"\"\" block =", "block. \"\"\" return self.get_state_machine_class_for_block_slot( slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot,", "in the canonical chain. 
\"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation:", ") from eth2.beacon.exceptions import ( BlockClassError, StateMachineNotFound, ) from eth2.beacon.types.blocks import ( BaseBeaconBlock,", "__init__(self, base_db: BaseAtomicDB) -> None: if not self.sm_configuration: raise ValueError( \"The Chain class", "self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock: \"\"\" Return the block at the canonical", "= self.chaindb.get_slot_by_root(block_root) sm_class = self.get_state_machine_class_for_block_slot(slot) block_class = sm_class.block_class return block_class def create_block_from_parent(self, parent_block:", "import ( BlockNotFound, ) from eth.validation import ( validate_word, ) from eth_typing import", "a 3-tuple - the imported block - a tuple of blocks which are", "\"\"\" return self.chaindb.get_canonical_block_root(slot) def import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock,", "if cls.sm_configuration is None: raise AttributeError(\"Chain classes must define the StateMachines in sm_configuration\")", "'BaseBeaconStateMachine': \"\"\" Return the ``StateMachine`` instance for the given block number. \"\"\" block", "part of the canonical chain. - a tuple of blocks which were canonical", "cls(base_db) # # StateMachine API # @classmethod def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']:", "head. \"\"\" if block is None: head = self.get_canonical_head() return self.create_block_from_parent(head, FromBlockParams()) else:", "not set\") return cls.chaindb_class # # Chain API # @classmethod def from_genesis(cls, base_db:", "'BaseBeaconChain': \"\"\" Initialize the ``BeaconChain`` from a genesis state. \"\"\" sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot)", "\"\"\" A Chain is a combination of one or more ``StateMachine`` classes. Each", ") from eth2._utils.ssz import ( validate_imported_block_unchanged, ) from eth2.beacon.db.chain import ( BaseBeaconChainDB, BeaconChainDB,", "validate_word(block_root, title=\"Block Hash\") block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_canonical_head(self) -> BaseBeaconBlock:", "there's no block with the given number in the canonical chain. \"\"\" validate_slot(slot)", "# Chain API # @classmethod @abstractmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block:", "FromBlockParams) -> BaseBeaconBlock: pass @abstractmethod def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock: pass @abstractmethod", "self.chaindb.get_canonical_head_root() block_class = self.get_block_class(block_root) return self.chaindb.get_block_by_root(block_root, block_class) def get_score(self, block_root: Hash32) -> int:", "no longer canonical. 
\"\"\" try: parent_block = self.get_block_by_root(block.previous_block_root) except BlockNotFound: raise ValidationError( \"Attempt", "return sm_class raise StateMachineNotFound(\"No StateMachine available for block slot: #{0}\".format(slot)) def get_state_machine(self, at_block:", ") from eth.exceptions import ( BlockNotFound, ) from eth.validation import ( validate_word, )", "Hash32: \"\"\" Return the block hash with the given number in the canonical", "# Helpers # @classmethod @abstractmethod def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]: pass # # Chain", "pass @abstractmethod def get_block(self) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) ->", "is no matching black hash. \"\"\" return self.chaindb.get_score(block_root) def ensure_block(self, block: BaseBeaconBlock=None) ->", "the block with the given hash. Raise ``BlockNotFound`` if there is no matching", "if there's no block with the given number in the canonical chain. \"\"\"", "get_block(self) -> BaseBeaconBlock: \"\"\" Return the current TIP block. \"\"\" return self.get_state_machine().block def", "new_canonical_blocks, old_canonical_blocks, ) = self.chaindb.persist_block(imported_block, imported_block.__class__) self.logger.debug( 'IMPORTED_BLOCK: slot %s | signed root", "a combination of one or more ``StateMachine`` classes. Each ``StateMachine`` is associated with", "import_block( self, block: BaseBeaconBlock, perform_validation: bool=True ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]:", ") from eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, )", "eth.db.backends.base import ( BaseAtomicDB, ) from eth.exceptions import ( BlockNotFound, ) from eth.validation", "@abstractmethod def get_state_machine_class( cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']: pass @abstractmethod def get_state_machine(self, at_block:", "from eth._utils.datatypes import ( Configurable, ) from eth.db.backends.base import ( BaseAtomicDB, ) from", "pass @abstractmethod def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock: pass @abstractmethod def get_canonical_block_root(self, slot:", "from eth2.beacon.typing import ( FromBlockParams, Slot, ) from eth2.beacon.validation import ( validate_slot, )", "API # @classmethod def from_genesis(cls, base_db: BaseAtomicDB, genesis_state: BeaconState, genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':", "= self.get_chaindb_class()(base_db) # # Helpers # @classmethod def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']: if cls.chaindb_class", "\"\"\" Return ``block`` if it is not ``None``, otherwise return the block of", "imported_block = self.get_state_machine(base_block_for_import).import_block(block) # Validate the imported block. 
from abc import ABC, abstractmethod
import logging
from typing import TYPE_CHECKING, Tuple, Type

from eth._utils.datatypes import Configurable
from eth.db.backends.base import BaseAtomicDB
from eth.exceptions import BlockNotFound
from eth.validation import validate_word
from eth_typing import Hash32
from eth_utils import ValidationError, encode_hex

from eth2._utils.ssz import validate_imported_block_unchanged
from eth2.beacon.db.chain import BaseBeaconChainDB, BeaconChainDB
from eth2.beacon.exceptions import BlockClassError, StateMachineNotFound
from eth2.beacon.types.blocks import BaseBeaconBlock
from eth2.beacon.types.states import BeaconState
from eth2.beacon.typing import FromBlockParams, Slot
from eth2.beacon.validation import validate_slot

if TYPE_CHECKING:
    from eth2.beacon.state_machines.base import (  # noqa: F401
        BaseBeaconStateMachine,
    )


class BaseBeaconChain(Configurable, ABC):
    """
    The base class for all BeaconChain objects.
    """
    chaindb = None  # type: BaseBeaconChainDB
    chaindb_class = None  # type: Type[BaseBeaconChainDB]
    sm_configuration = None  # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...]
    chain_id = None  # type: int

    #
    # Helpers
    #
    @classmethod
    @abstractmethod
    def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]:
        pass

    #
    # Chain API
    #
    @classmethod
    @abstractmethod
    def from_genesis(cls,
                     base_db: BaseAtomicDB,
                     genesis_state: BeaconState,
                     genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':
        pass

    #
    # State Machine API
    #
    @classmethod
    @abstractmethod
    def get_state_machine_class(
            cls,
            block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']:
        pass

    @abstractmethod
    def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine':
        pass

    @classmethod
    @abstractmethod
    def get_state_machine_class_for_block_slot(
            cls,
            slot: Slot) -> Type['BaseBeaconStateMachine']:
        pass

    #
    # Block API
    #
    @abstractmethod
    def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]:
        pass

    @abstractmethod
    def create_block_from_parent(self,
                                 parent_block: BaseBeaconBlock,
                                 block_params: FromBlockParams) -> BaseBeaconBlock:
        pass

    @abstractmethod
    def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock:
        pass

    @abstractmethod
    def get_canonical_head(self) -> BaseBeaconBlock:
        pass

    @abstractmethod
    def get_score(self, block_root: Hash32) -> int:
        pass

    @abstractmethod
    def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock:
        pass

    @abstractmethod
    def get_block(self) -> BaseBeaconBlock:
        pass

    @abstractmethod
    def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock:
        pass

    @abstractmethod
    def get_canonical_block_root(self, slot: Slot) -> Hash32:
        pass

    @abstractmethod
    def import_block(
            self,
            block: BaseBeaconBlock,
            perform_validation: bool=True
    ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]:
        pass


class BeaconChain(BaseBeaconChain):
    """
    A Chain is a combination of one or more ``StateMachine`` classes. Each
    ``StateMachine`` is associated with a range of slots. The Chain class acts
    as a wrapper around these other StateMachine classes, delegating operations
    to the appropriate StateMachine depending on the current block slot number.
    """
    logger = logging.getLogger("eth2.beacon.chains.BeaconChain")

    chaindb_class = BeaconChainDB  # type: Type[BaseBeaconChainDB]

    def __init__(self, base_db: BaseAtomicDB) -> None:
        if not self.sm_configuration:
            raise ValueError(
                "The Chain class cannot be instantiated with an empty `sm_configuration`"
            )
        else:
            # TODO: implement validate_sm_configuration(self.sm_configuration)
            # validate_sm_configuration(self.sm_configuration)
            pass

        self.chaindb = self.get_chaindb_class()(base_db)

    #
    # Helpers
    #
    @classmethod
    def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']:
        if cls.chaindb_class is None:
            raise AttributeError("`chaindb_class` not set")
        return cls.chaindb_class

    #
    # Chain API
    #
    @classmethod
    def from_genesis(cls,
                     base_db: BaseAtomicDB,
                     genesis_state: BeaconState,
                     genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':
        """
        Initialize the ``BeaconChain`` from a genesis state.
        """
        sm_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot)
        if type(genesis_block) != sm_class.block_class:
            raise BlockClassError(
                "Given genesis block class: {}, StateMachine.block_class: {}".format(
                    type(genesis_block),
                    sm_class.block_class
                )
            )

        chaindb = cls.get_chaindb_class()(db=base_db)
        chaindb.persist_state(genesis_state)
        return cls._from_genesis_block(base_db, genesis_block)

    @classmethod
    def _from_genesis_block(cls,
                            base_db: BaseAtomicDB,
                            genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':
        """
        Initialize the ``BeaconChain`` from the genesis block.
        """
        chaindb = cls.get_chaindb_class()(db=base_db)
        chaindb.persist_block(genesis_block, genesis_block.__class__)
        return cls(base_db)

    #
    # StateMachine API
    #
    @classmethod
    def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']:
        """
        Return the ``StateMachine`` class for the given block.
        """
        return cls.get_state_machine_class_for_block_slot(block.slot)

    @classmethod
    def get_state_machine_class_for_block_slot(
            cls,
            slot: Slot) -> Type['BaseBeaconStateMachine']:
        """
        Return the ``StateMachine`` class for the given block slot number.
        """
        if cls.sm_configuration is None:
            raise AttributeError(
                "Chain classes must define the StateMachines in sm_configuration"
            )

        validate_slot(slot)
        for start_slot, sm_class in reversed(cls.sm_configuration):
            if slot >= start_slot:
                return sm_class
        raise StateMachineNotFound("No StateMachine available for block slot: #{0}".format(slot))

    def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine':
        """
        Return the ``StateMachine`` instance for the given block number.
        """
        block = self.ensure_block(at_block)
        sm_class = self.get_state_machine_class_for_block_slot(block.slot)
        return sm_class(
            chaindb=self.chaindb,
            block=block,
        )

    #
    # Block API
    #
    def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]:
        slot = self.chaindb.get_slot_by_root(block_root)
        sm_class = self.get_state_machine_class_for_block_slot(slot)
        block_class = sm_class.block_class
        return block_class

    def create_block_from_parent(self,
                                 parent_block: BaseBeaconBlock,
                                 block_params: FromBlockParams) -> BaseBeaconBlock:
        """
        Passthrough helper to the ``StateMachine`` class of the block descending
        from the given block.
        """
        return self.get_state_machine_class_for_block_slot(
            slot=parent_block.slot + 1 if block_params.slot is None else block_params.slot,
        ).create_block_from_parent(parent_block, block_params)

    def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock:
        """
        Return the requested block as specified by block hash.

        Raise ``BlockNotFound`` if there's no block with the given hash in the db.
        """
        validate_word(block_root, title="Block Hash")
        block_class = self.get_block_class(block_root)
        return self.chaindb.get_block_by_root(block_root, block_class)

    def get_canonical_head(self) -> BaseBeaconBlock:
        """
        Return the block at the canonical chain head.

        Raise an error if there's no head defined for the canonical chain.
        """
        block_root = self.chaindb.get_canonical_head_root()
        block_class = self.get_block_class(block_root)
        return self.chaindb.get_block_by_root(block_root, block_class)

    def get_score(self, block_root: Hash32) -> int:
        """
        Return the score of the block with the given hash.

        Raise ``BlockNotFound`` if there is no matching block hash.
        """
        return self.chaindb.get_score(block_root)

    def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock:
        """
        Return ``block`` if it is not ``None``, otherwise return a block built
        on top of the canonical head.
        """
        if block is None:
            head = self.get_canonical_head()
            return self.create_block_from_parent(head, FromBlockParams())
        else:
            return block

    def get_block(self) -> BaseBeaconBlock:
        """
        Return the current TIP block.
        """
        return self.get_state_machine().block

    def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock:
        """
        Return the block with the given slot number in the canonical chain.

        Raise ``BlockNotFound`` if there's no block with the given number in the
        canonical chain.
        """
        return self.get_block_by_root(self.get_canonical_block_root(slot))

    def get_canonical_block_root(self, slot: Slot) -> Hash32:
        """
        Return the block root with the given slot number in the canonical chain.

        Raise ``BlockNotFound`` if there's no block with the given number in the
        canonical chain.
        """
        return self.chaindb.get_canonical_block_root(slot)

    def import_block(
            self,
            block: BaseBeaconBlock,
            perform_validation: bool=True
    ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]:
        """
        Import a complete block and return a 3-tuple of

        - the imported block
        - a tuple of blocks which are now part of the canonical chain
        - a tuple of blocks which were canonical and now are no longer canonical
        """
        try:
            parent_block = self.get_block_by_root(block.previous_block_root)
        except BlockNotFound:
            raise ValidationError(
                "Attempt to import block #{}.  Cannot import block {} before importing "
                "its parent block at {}".format(
                    block.slot,
                    block.signed_root,
                    block.previous_block_root,
                )
            )
        base_block_for_import = self.create_block_from_parent(
            parent_block,
            FromBlockParams(),
        )
        state, imported_block = self.get_state_machine(base_block_for_import).import_block(block)

        # Validate the imported block.
        if perform_validation:
            validate_imported_block_unchanged(imported_block, block)

        # TODO: Now it just persists all state. Should design how to clean up the old state.
        self.chaindb.persist_state(state)

        (
            new_canonical_blocks,
            old_canonical_blocks,
        ) = self.chaindb.persist_block(imported_block, imported_block.__class__)

        self.logger.debug(
            'IMPORTED_BLOCK: slot %s | signed root %s',
            imported_block.slot,
            encode_hex(imported_block.signed_root),
        )

        return imported_block, new_canonical_blocks, old_canonical_blocks
#!/usr/local/bin/python3
import paramiko, time

# using as SSH Client
client = paramiko.SSHClient()
# check dir(client) to find available options.
# auto adjust host key verification with yes or no
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())

# time for connecting to remote Cisco IOS
"""
Manually taking input
addr = input('Provide IP address to connect to: ')
user = input('Username: ')
pwd = <PASSWORD>('Password: ')
"""

# Taking input from files
f1 = open("devices.txt", "r")
f2 = open("commands.txt", "r")

for line in f1:
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    data = line.split(" ")
    # print(data)
    addr = data[0]
    user = data[1]
    pwd = data[2]
    f3 = open(addr + ".txt", "w+")
    # print(addr + " " + user + " " + pwd)
    client.connect(addr, username=user, password=<PASSWORD>, allow_agent=False, look_for_keys=False)
    # we have to ask for Shell
    device_access = client.invoke_shell()
    for line in f2:
        device_access.send(line)
        time.sleep(1)
        output = device_access.recv(55000).decode('ascii')
        f3.write(output)

"""
THIS CODE IS FOR SINGLE COMMAND, FOR MULTIPLE COMMANDS CODE BELOW

# send command to the device
device_access.send("ter len 0\nshow run \n")
time.sleep(2)

# receive output from the device, convert it to byte-like format and print it
print(device_access.recv(550000).decode('ascii'))

# We can print the same to a file too
with open("csr1000v.txt", "w") as f:
    f.write(device_access.recv(550000).decode('ascii'))
"""
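One quirk of the loop above is that `commands.txt` is iterated inside the device loop, so the file object is exhausted after the first device. Below is a minimal reworked sketch under the same assumptions (a `devices.txt` with `address user password` per line, a `commands.txt` with one command per line); the `run_commands` helper name is mine, and the timing and buffer sizes are rough placeholders, not tuned values.

```python
import time

import paramiko


def run_commands(addr: str, user: str, pwd: str, commands: list) -> str:
    """Open an SSH shell to one device, send each command, and return the collected output."""
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(addr, username=user, password=pwd,
                   allow_agent=False, look_for_keys=False)
    shell = client.invoke_shell()
    output = ""
    for command in commands:
        shell.send(command if command.endswith("\n") else command + "\n")
        time.sleep(1)  # crude wait; adjust per device and command
        output += shell.recv(65535).decode("ascii", errors="replace")
    client.close()
    return output


if __name__ == "__main__":
    # Read the command list once so it can be replayed for every device.
    with open("commands.txt") as f2:
        commands = f2.readlines()

    with open("devices.txt") as f1:
        for line in f1:
            addr, user, pwd = line.split()[:3]
            with open(addr + ".txt", "w") as out:
                out.write(run_commands(addr, user, pwd, commands))
```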
# for n in range(400, 500):
#     i = n // 100
#     j = n // 10 % 10
#     k = n % 10
#     if n == i ** 3 + j ** 3 + k ** 3:
#         print(n)

# Exercise 1 (problem 16)
# input("Please enter (first time):")
# s1 = input("Please enter (second time):")
# l1 = s1.split(' ')
# l2 = []
# for i in l1:
#     if i.isdigit():
#         l2.append(int(i))
# for i in l2:
#     if not (i % 6):
#         print(i, end=" ")

# Exercise 2 (problem 17)
out_l1 = []


def bian_int_list(l1):
    re_l1 = []  # the list to be returned
    for i in l1:
        re_l1.append(i)


def jisuan(str_num):
    he1 = 0
    global out_l1
    for i in l1():
        he1 += int(i) ** 2
        if he1 > int(str_num):
            out_l1.append(str_num)
    return None


while 1:
    in_1 = input("Please enter values:")
    nums_l1 = in_1.split(' ')
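The uncommented fragment above is incomplete as recovered (for example, `jisuan` iterates over `l1()`, which is not defined in that scope, and `bian_int_list` never returns its list). The clearest part of the recovered exercise is the commented-out multiples-of-6 filter, so here is a compact, runnable version of that same idea; the function name `multiples_of_six` is mine and not part of the original file.

```python
def multiples_of_six(text: str) -> list:
    """Return the integers in a space-separated string that are divisible by 6."""
    return [int(tok) for tok in text.split() if tok.isdigit() and int(tok) % 6 == 0]


# Example: "12 7 18 25 30" -> [12, 18, 30]
print(multiples_of_six("12 7 18 25 30"))
```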
"""
A query transformer is a function that accepts a program and returns a program,
plus a priority level. Higher priority transformers are placed closer to the front
of the list. We're ensuring it is a function, because we're going to evaluate it
later. We'll assume there won't be an enormous number of transformer additions, and
walk the list linearly to add a new one. We'll leave a note in case this assumption
turns out to be false: a binary search is much more time-optimal for long lists,
but adds a little complexity and doesn't really speed up short lists.
"""


class Transformer:
    def __init__(self):
        self.T = []

    def transform(self, program):
        return program


"""
Dagoba.T = []                               # transformers (more than meets the eye)
"""

"""
Dagoba.addTransformer = function(fun, priority) {
  if(typeof fun != 'function')
    return Dagoba.error('Invalid transformer function')

  for(var i = 0; i < Dagoba.T.length; i++)  # OPT: binary search
    if(priority > Dagoba.T[i].priority) break

  Dagoba.T.splice(i, 0, {priority: priority, fun: fun})
}
"""

"""
Dagoba.transform = function(program) {
  return Dagoba.T.reduce(function(acc, transformer) {
    return transformer.fun(acc)
  }, program)
}
"""

"""
Dagoba.addAlias = function(newname, oldname, defaults) {
  defaults = defaults || []                   # default arguments for the alias
  Dagoba.addPipetype(newname, function() {})  # because there's no method catchall in js
  Dagoba.addTransformer(function(program) {
    return program.map(function(step) {
      if(step[0] != newname) return step
      return [oldname, Dagoba.extend(step[1], defaults)]
    })
  }, 100)                                     # these need to run early, so they get a high priority
}
"""

"""
Dagoba.extend = function(list, defaults) {
  return Object.keys(defaults).reduce(function(acc, key) {
    if(typeof list[key] != 'undefined') return acc
    acc[key] = defaults[key]
    return acc
  }, list)
}
"""
We’ll leave a note in case", "transform(self, program): return program \"\"\" Dagoba.T = [] # transformers (more than meets", "meets the eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority) { if(typeof fun !=", "function(newname, oldname, defaults) { defaults = defaults || [] # default arguments for", "Dagoba.addTransformer(function(program) { return program.map(function(step) { if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1],", "if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1], defaults)] }) }, 100) #", "}, 100) # these need to run early, so they get a high", "function() {}) # because there's no method catchall in js Dagoba.addTransformer(function(program) { return", "the eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority) { if(typeof fun != 'function')", "Dagoba.addTransformer = function(fun, priority) { if(typeof fun != 'function') return Dagoba.error('Invalid transformer function')", "= [] # transformers (more than meets the eye) \"\"\" \"\"\" Dagoba.addTransformer =", "# transformers (more than meets the eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority)", "Dagoba.addAlias = function(newname, oldname, defaults) { defaults = defaults || [] # default", "'function') return Dagoba.error('Invalid transformer function') for(var i = 0; i < Dagoba.T.length; i++)", "fun: fun}) } \"\"\" \"\"\" Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer) {", "function') for(var i = 0; i < Dagoba.T.length; i++) # OPT: binary search", "transformers are placed closer to the front of the list. We’re ensuring is", "program) } \"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname, defaults) { defaults = defaults", "> Dagoba.T[i].priority) break Dagoba.T.splice(i, 0, {priority: priority, fun: fun}) } \"\"\" \"\"\" Dagoba.transform", "\"\"\" Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key] !=", "\"\"\" Dagoba.addTransformer = function(fun, priority) { if(typeof fun != 'function') return Dagoba.error('Invalid transformer", "fun}) } \"\"\" \"\"\" Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer) { return", "return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key] != 'undefined') return acc acc[key] = defaults[key]", "front of the list. We’re ensuring is a function, because we’re going to", "newname) return step return [oldname, Dagoba.extend(step[1], defaults)] }) }, 100) # these need", "get a high priority } \"\"\" \"\"\" Dagoba.extend = function(list, defaults) { return", "program.map(function(step) { if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1], defaults)] }) },", "< Dagoba.T.length; i++) # OPT: binary search if(priority > Dagoba.T[i].priority) break Dagoba.T.splice(i, 0,", "function(program) { return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) }, program) } \"\"\" \"\"\"", "{ return program.map(function(step) { if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1], defaults)]", "to evaluate it later 31 . We’ll assume there won’t be an enormous", "level. Higher priority transformers are placed closer to the front of the list.", "is much more time-optimal for long lists, but adds a little complexity and", "a program, plus a priority level. Higher priority transformers are placed closer to", "and doesn’t really speed up short lists. 
\"\"\" class Transformer: def __init__(self): self.T", "search is much more time-optimal for long lists, but adds a little complexity", "high priority } \"\"\" \"\"\" Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key)", "a little complexity and doesn’t really speed up short lists. \"\"\" class Transformer:", "key) { if(typeof list[key] != 'undefined') return acc acc[key] = defaults[key] return acc", "program and returns a program, plus a priority level. Higher priority transformers are", "\"\"\" \"\"\" Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) },", "a high priority } \"\"\" \"\"\" Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc,", "\"\"\" \"\"\" Dagoba.extend = function(list, defaults) { return Object.keys(defaults).reduce(function(acc, key) { if(typeof list[key]", "fun != 'function') return Dagoba.error('Invalid transformer function') for(var i = 0; i <", "\"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname, defaults) { defaults = defaults || []", "priority transformers are placed closer to the front of the list. We’re ensuring", "turns out to be false — a binary search is much more time-optimal", "speed up short lists. \"\"\" class Transformer: def __init__(self): self.T = [] def", "def transform(self, program): return program \"\"\" Dagoba.T = [] # transformers (more than", "Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc, transformer) { return transformer.fun(acc) }, program) }", "= function(fun, priority) { if(typeof fun != 'function') return Dagoba.error('Invalid transformer function') for(var", "are placed closer to the front of the list. We’re ensuring is a", "<filename>graphdb/transformer.py<gh_stars>1-10 \"\"\" A query transformer is a function that accepts a program and", "to be false — a binary search is much more time-optimal for long", "transformer is a function that accepts a program and returns a program, plus", "they get a high priority } \"\"\" \"\"\" Dagoba.extend = function(list, defaults) {", "We’ll leave a note in case this assumption turns out to be false", "eye) \"\"\" \"\"\" Dagoba.addTransformer = function(fun, priority) { if(typeof fun != 'function') return", "i = 0; i < Dagoba.T.length; i++) # OPT: binary search if(priority >", "list[key] != 'undefined') return acc acc[key] = defaults[key] return acc }, list) }", "100) # these need to run early, so they get a high priority", "ensuring is a function, because we’re going to evaluate it later 31 .", "}, program) } \"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname, defaults) { defaults =", "to run early, so they get a high priority } \"\"\" \"\"\" Dagoba.extend", "linearly to add a new one. We’ll leave a note in case this", "really speed up short lists. 
\"\"\" class Transformer: def __init__(self): self.T = []", "case this assumption turns out to be false — a binary search is", "priority) { if(typeof fun != 'function') return Dagoba.error('Invalid transformer function') for(var i =", "{ defaults = defaults || [] # default arguments for the alias Dagoba.addPipetype(newname,", "return [oldname, Dagoba.extend(step[1], defaults)] }) }, 100) # these need to run early,", "{priority: priority, fun: fun}) } \"\"\" \"\"\" Dagoba.transform = function(program) { return Dagoba.T.reduce(function(acc,", "\"\"\" A query transformer is a function that accepts a program and returns", "defaults || [] # default arguments for the alias Dagoba.addPipetype(newname, function() {}) #", "additions, and walk the list linearly to add a new one. We’ll leave", "and walk the list linearly to add a new one. We’ll leave a", "to add a new one. We’ll leave a note in case this assumption", "return program.map(function(step) { if(step[0] != newname) return step return [oldname, Dagoba.extend(step[1], defaults)] })", "\"\"\" Dagoba.addAlias = function(newname, oldname, defaults) { defaults = defaults || [] #", "be false — a binary search is much more time-optimal for long lists,", "transformer.fun(acc) }, program) } \"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname, defaults) { defaults", "transformer) { return transformer.fun(acc) }, program) } \"\"\" \"\"\" Dagoba.addAlias = function(newname, oldname,", "of transformer additions, and walk the list linearly to add a new one.", "0; i < Dagoba.T.length; i++) # OPT: binary search if(priority > Dagoba.T[i].priority) break", "defaults)] }) }, 100) # these need to run early, so they get", "there's no method catchall in js Dagoba.addTransformer(function(program) { return program.map(function(step) { if(step[0] !=", "default arguments for the alias Dagoba.addPipetype(newname, function() {}) # because there's no method", "31 . We’ll assume there won’t be an enormous number of transformer additions,", "note in case this assumption turns out to be false — a binary" ]
[ "# 任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK = 130 SUCCESS =", "= 110 POLLING = 120 CALLBACK = 130 SUCCESS = 200 # 任务执行成功", "130 SUCCESS = 200 # 任务执行成功 RETRY = 300 # 任务重试 FAILURE =", "CALLBACK = 130 SUCCESS = 200 # 任务执行成功 RETRY = 300 # 任务重试", "<filename>yzcore/templates/project_template/src/const/_job.py #!/usr/bin/python3.6.8+ # -*- coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ...", "# 任务执行成功 RETRY = 300 # 任务重试 FAILURE = 400 # 任务执行失败 REVOKED", "... \"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED = 100 #", "#!/usr/bin/python3.6.8+ # -*- coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\"", "# -*- coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\" class", "JobStatus(object): PENDING = 0 # 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS =", "= 200 # 任务执行成功 RETRY = 300 # 任务重试 FAILURE = 400 #", "\"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING = 0", "100 # 任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK = 130 SUCCESS", "300 # 任务重试 FAILURE = 400 # 任务执行失败 REVOKED = 500 # 任务撤销", "@auth: cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING = 0 #", "= 300 # 任务重试 FAILURE = 400 # 任务执行失败 REVOKED = 500 #", "PROCESS = 110 POLLING = 120 CALLBACK = 130 SUCCESS = 200 #", "cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行", "= 100 # 任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK = 130", "RETRY = 300 # 任务重试 FAILURE = 400 # 任务执行失败 REVOKED = 500", "# 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS = 110 POLLING = 120", "SUCCESS = 200 # 任务执行成功 RETRY = 300 # 任务重试 FAILURE = 400", "任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK = 130 SUCCESS = 200", "coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING", "class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS", "\"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED = 100 # 任务执行开始", "= 130 SUCCESS = 200 # 任务执行成功 RETRY = 300 # 任务重试 FAILURE", "STARTED = 100 # 任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK =", "0 # 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS = 110 POLLING =", "= 120 CALLBACK = 130 SUCCESS = 200 # 任务执行成功 RETRY = 300", "120 CALLBACK = 130 SUCCESS = 200 # 任务执行成功 RETRY = 300 #", "-*- coding:utf-8 -*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object):", "POLLING = 120 CALLBACK = 130 SUCCESS = 200 # 任务执行成功 RETRY =", "任务等待执行 STARTED = 100 # 任务执行开始 PROCESS = 110 POLLING = 120 CALLBACK", "@date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED", "200 # 任务执行成功 RETRY = 300 # 任务重试 FAILURE = 400 # 任务执行失败", "2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED =", "= 0 # 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS = 110 POLLING", "@desc: ... \"\"\" class JobStatus(object): PENDING = 0 # 任务等待执行 STARTED = 100", "PENDING = 0 # 任务等待执行 STARTED = 100 # 任务执行开始 PROCESS = 110", "110 POLLING = 120 CALLBACK = 130 SUCCESS = 200 # 任务执行成功 RETRY", "任务执行成功 RETRY = 300 # 任务重试 FAILURE = 400 # 任务执行失败 REVOKED =", "-*- \"\"\" @auth: cml @date: 2020-12-2 @desc: ... \"\"\" class JobStatus(object): PENDING =" ]
[ "boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc", "erro de nao uso do zip_longest else: from itertools import zip_longest DIGITS =", "def drawBoleto(self, boletoDados): \"\"\"Imprime Boleto Convencional Você pode chamar este método diversas vezes", "'n', 'n', 'w'], ['w', 'w', 'n', 'n', 'n'], ['n', 'n', 'w', 'n', 'w'],", "podem ser implementadas no futuro com a mesma interface, para fazer output em", "boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor", "= data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento =", "= boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] =", "class. :param landscape: Formato da folha. Usar ``True`` para boleto tipo carnê. \"\"\"", "def save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html += '</div></body></html>' if hasattr(self.fileDescr,", "= boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento'] =", "fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if nfloat: txt = nfloat txt = txt.replace('.',", "itertools import chain if sys.version_info < (3,): from itertools import izip_longest as zip_longest", "as zip_longest zip_longest # chamando para evitar erro de nao uso do zip_longest", "def drawCanhoto(self, html): if html: self.html += str(html) def printPage(self): self.html += '<script>window.print();</script>'", "self.html += '</div><div class=\"pagina\">' def save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html", "from itertools import chain if sys.version_info < (3,): from itertools import izip_longest as", "arquivo ou *file-like* class. :param landscape: Formato da folha. Usar ``True`` para boleto", "do boleto a ser preenchido. 
Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData`", "= self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Instruções", "'n s', 'n', 'n s'] if len(code) % 2 != 0: code =", "de nao uso do zip_longest else: from itertools import zip_longest DIGITS = [", "= 27 self.fontSizeValue = 12 self.title = 'Boleto bancário' self.fileDescr = file_descr if", "= boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] =", "boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y')", "boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero()", "Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento", "tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade']", "tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento']", "base64 from itertools import chain if sys.version_info < (3,): from itertools import izip_longest", "self._grouper(2, code): digt1_repr = DIGITS[int(digt1)] digt2_repr = map(lambda x: x + ' s',", "% 2 != 0: code = '0' + code for digt1, digt2 in", "de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData` \"\"\" raise NotImplementedError('Em desenvolvimento') def", "tpl.substitute(tpl_data) def drawCanhoto(self, html): if html: self.html += str(html) def printPage(self): self.html +=", "dm in boletoDados.demonstrativo: tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html +=", "map(lambda x: x + ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n s', 'n'])", "'<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr />' def _drawReciboCaixa(self, boletoDados):", "[ ['n', 'n', 'w', 'w', 'n'], ['w', 'n', 'n', 'n', 'w'], ['n', 'w',", "tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado']", "= data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] = 
boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process =", "self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html): if html: self.html += str(html) def printPage(self):", "# Cabeçalho tpl_data['logo_img'] = '' if boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read()", "'n', 'w', 'n'], ['n', 'w', 'n', 'w', 'n'], ] class BoletoHTML(object): \"\"\"Geração do", "tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] =", "tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código de barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data)", "tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] =", "início de nova página\"\"\" self.html += '</div><div class=\"pagina\">' def save(self): \"\"\"Fecha boleto e", "\"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] = '' if", "digits.extend(['w', 'n s', 'n']) result = [] for digit in digits: result.append('<span class=\"{0}\"></span>'.format(digit))", "return image_path def _drawReciboSacado(self, boletoDados): \"\"\"Imprime o Recibo do Sacado para modelo de", "class BoletoHTML(object): \"\"\"Geração do Boleto em HTML Esta classe é responsável por imprimir", "\"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html += '</div></body></html>' if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html)", "+= '</div><div class=\"pagina\">' def save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html +=", "pode chamar este método diversas vezes para criar um arquivo com várias páginas,", "= '0' + code for digt1, digt2 in self._grouper(2, code): digt1_repr = DIGITS[int(digt1)]", "['n', 'n', 'w', 'n', 'w'], ['w', 'n', 'w', 'n', 'n'], ['n', 'w', 'w',", "de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força", "< (3,): from itertools import izip_longest as zip_longest zip_longest # chamando para evitar", "no futuro com a mesma interface, para fazer output em LaTeX, etc ...", "= data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade'] =", "if len(code) % 2 != 0: code = '0' + code for digt1,", "_formataValorParaExibir(self, nfloat): if nfloat: txt = nfloat txt = txt.replace('.', ',') else: txt", ":class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho", "self.html += '</div></body></html>' if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w') as fd:", "data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento']", "carnê 
com 2 boletos por página. :param boletoDados1: Objeto com os dados do", "BoletoHTML(object): \"\"\"Geração do Boleto em HTML Esta classe é responsável por imprimir o", "code = '0' + code for digt1, digt2 in self._grouper(2, code): digt1_repr =", "'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente']", "preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto com os dados do", "'w', 'w', 'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w', 'n', 'n', 'w',", "boletoDados.numero_documento data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento)", "'n', 'w'], ['n', 'w', 'n', 'n', 'w'], ['w', 'w', 'n', 'n', 'n'], ['n',", ":param landscape: Formato da folha. Usar ``True`` para boleto tipo carnê. \"\"\" def", ":class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] = ''", "image_path def _drawReciboSacado(self, boletoDados): \"\"\"Imprime o Recibo do Sacado para modelo de página", "s', 'n']) result = [] for digit in digits: result.append('<span class=\"{0}\"></span>'.format(digit)) return ''.join(result)", "img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel # Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento']", "digt2 in self._grouper(2, code): digt1_repr = DIGITS[int(digt1)] digt2_repr = map(lambda x: x +", "file_descr: Um arquivo ou *file-like* class. :param landscape: Formato da folha. Usar ``True``", "arquivo com várias páginas, uma por boleto. :param boletoDados: Objeto com os dados", "= nfloat txt = txt.replace('.', ',') else: txt = \"\" return txt def", "subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" self._drawReciboSacado(boletoDados) self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self):", "boleto a ser preenchido. 
Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\"", "save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html += '</div></body></html>' if hasattr(self.fileDescr, 'write'):", "open(self.fileDescr, 'w') as fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if nfloat: txt = nfloat", "DIGITS = [ ['n', 'n', 'w', 'w', 'n'], ['w', 'n', 'n', 'n', 'w'],", "zip_longest zip_longest # chamando para evitar erro de nao uso do zip_longest else:", "= boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente", "unicode em data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] =", "NotImplementedError('Em desenvolvimento') def drawBoleto(self, boletoDados): \"\"\"Imprime Boleto Convencional Você pode chamar este método", "para evitar erro de nao uso do zip_longest else: from itertools import zip_longest", "'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w', 'n', 'n', 'w', 'n'], ['n',", "ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData`", "'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr, 'w') as fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if", "ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl =", "self.fontSizeTitle = 9 self.heightLine = 27 self.fontSizeValue = 12 self.title = 'Boleto bancário'", "tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr />' def", "for more details. \"\"\" import os import string import sys import codecs import", "import zip_longest DIGITS = [ ['n', 'n', 'w', 'w', 'n'], ['w', 'n', 'n',", "boletoDados.demonstrativo: tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr />'", "= DIGITS[int(digt1)] digt2_repr = map(lambda x: x + ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr)))", "= boletoDados.cedente_endereco valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc # Demonstrativo tpl_data['demonstrativo'] = ''", "nfloat): if nfloat: txt = nfloat txt = txt.replace('.', ',') else: txt =", "'w'], ['w', 'w', 'n', 'n', 'n'], ['n', 'n', 'w', 'n', 'w'], ['w', 'n',", "for digit in digits: result.append('<span class=\"{0}\"></span>'.format(digit)) return ''.join(result) def _grouper(self, n, iterable, fillvalue=None):", "'n'], ['w', 'n', 'n', 'n', 'w'], ['n', 'w', 'n', 'n', 'w'], ['w', 'w',", "'' if boletoDados.logo_image: img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux = base64.b64encode(aux) img_base64", "/>' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime o Recibo do Caixa :param boletoDados: Objeto com", "\"\"\"Imprime um boleto tipo carnê com 2 boletos por página. 
:param boletoDados1: Objeto", "boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] =", "= data_vencimento.strftime('%d/%m/%Y') # value em unicode em data.py if isinstance(boletoDados.local_pagamento, unicode): tpl_data['local_pagamento'] =", "-*- \"\"\" pyboleto.html ~~~~~~~~~~~~~ Classe Responsável por fazer o output do boleto em", "27 self.fontSizeValue = 12 self.title = 'Boleto bancário' self.fileDescr = file_descr if landscape:", "tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento']", "mesma interface, para fazer output em LaTeX, etc ... Esta classe pode imprimir", "para boleto tipo carnê. \"\"\" def __init__(self, file_descr, landscape=False): # Tamanhos em px", "tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process", "12 self.title = 'Boleto bancário' self.fileDescr = file_descr if landscape: raise NotImplementedError('Em desenvolvimento...')", "def nextPage(self): \"\"\"Força início de nova página\"\"\" self.html += '</div><div class=\"pagina\">' def save(self):", "'w', 'n'], ['w', 'n', 'n', 'n', 'w'], ['n', 'w', 'n', 'n', 'w'], ['w',", "self._drawHorizontalCorteLine() self._drawReciboCaixa(boletoDados) self._drawHorizontalCorteLine() def nextPage(self): \"\"\"Força início de nova página\"\"\" self.html += '</div><div", "= '' for dm in boletoDados.demonstrativo: tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html += tpl.substitute(tpl_data) def", "self.html += str(html) def printPage(self): self.html += '<script>window.print();</script>' def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None): \"\"\"Imprime", "este método diversas vezes para criar um arquivo com várias páginas, uma por", "digt1_repr = DIGITS[int(digt1)] digt2_repr = map(lambda x: x + ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr,", "data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') # value em unicode em data.py if", "tpl_data['sacado_info'] = '' for linha_sacado in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) # Código de", "tpl_data['numero_documento'] = boletoDados.numero_documento data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['cedente_endereco'] = boletoDados.cedente_endereco valor_doc", "'<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] = '' for linha_sacado in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado)", "tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento'] = boletoDados.numero_documento tpl_data['especie_documento']", "nfloat txt = txt.replace('.', ',') else: txt = \"\" return txt def _codigoBarraI25(self,", "img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento']", "= 
boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] =", "tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y') tpl_data['numero_documento']", "dados do boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1:", "boleto tipo carnê. \"\"\" def __init__(self, file_descr, landscape=False): # Tamanhos em px self.width", "boleto em html. :copyright: © 2012 by <NAME> :license: BSD, see LICENSE for", "file_descr if landscape: raise NotImplementedError('Em desenvolvimento...') else: tpl = string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title,", "= base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] =", "Código de barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html): if", "© 2012 by <NAME> :license: BSD, see LICENSE for more details. \"\"\" import", "',') else: txt = \"\" return txt def _codigoBarraI25(self, code): \"\"\"Imprime Código de", "tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor", "boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type", "vezes para criar um arquivo com várias páginas, uma por boleto. :param boletoDados:", "a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2:", "+ ' s', DIGITS[int(digt2)]) digits.extend(chain(*zip(digt1_repr, digt2_repr))) digits.extend(['w', 'n s', 'n']) result = []", "s', 'n', 'n s'] if len(code) % 2 != 0: code = '0'", "self.widthCanhoto = 0 self.fontSizeTitle = 9 self.heightLine = 27 self.fontSizeValue = 12 self.title", "+= tpl.substitute(tpl_data) def _drawHorizontalCorteLine(self): self.html += '<hr />' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime o", "'n', 'w', 'w', 'n'], ['w', 'n', 'n', 'n', 'w'], ['n', 'w', 'n', 'n',", "de folha cheia. :param file_descr: Um arquivo ou *file-like* class. :param landscape: Formato", "Instruções tpl_data['instrucoes'] = '' for instrucao in boletoDados.instrucoes: tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao) # Rodapé", "result = [] for digit in digits: result.append('<span class=\"{0}\"></span>'.format(digit)) return ''.join(result) def _grouper(self,", "txt.replace('.', ',') else: txt = \"\" return txt def _codigoBarraI25(self, code): \"\"\"Imprime Código", "# Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento =", "folha. Usar ``True`` para boleto tipo carnê. 
\"\"\" def __init__(self, file_descr, landscape=False): #", "boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento = boletoDados.data_documento tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y')", "['w', 'n', 'w', 'n', 'n'], ['n', 'w', 'w', 'n', 'n'], ['n', 'n', 'n',", "= boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor =", "constroi o arquivo\"\"\" self.html += '</div></body></html>' if hasattr(self.fileDescr, 'write'): self.fileDescr.write(self.html) else: with open(self.fileDescr,", "http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\" digits = ['n', 'n s', 'n', 'n s'] if len(code) %", "= string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title, width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle) def _load_template(self, template): pyboleto_dir", "o Recibo do Caixa :param boletoDados: Objeto com os dados do boleto a", "com os dados do boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData`", ":class:`pyboleto.data.BoletoData` \"\"\" raise NotImplementedError('Em desenvolvimento') def drawBoleto(self, boletoDados): \"\"\"Imprime Boleto Convencional Você pode", "ser subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data =", "boleto em HTML. Outras classes podem ser implementadas no futuro com a mesma", "de nova página\"\"\" self.html += '</div><div class=\"pagina\">' def save(self): \"\"\"Fecha boleto e constroi", "subclasse de :class:`pyboleto.data.BoletoData` :type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {}", "data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira'] = boletoDados.carteira tpl_data['especie']", "boletos em formato de carnê (2 boletos por página) ou em formato de", "class=\"pagina\">' def save(self): \"\"\"Fecha boleto e constroi o arquivo\"\"\" self.html += '</div></body></html>' if", "(3,): from itertools import izip_longest as zip_longest zip_longest # chamando para evitar erro", "Objeto com os dados do boleto a ser preenchido. Deve ser subclasse de", "HTML. 
Outras classes podem ser implementadas no futuro com a mesma interface, para", "Recibo do Sacado para modelo de página inteira :param boletoDados: Objeto com os", "= valor_doc # Instruções tpl_data['instrucoes'] = '' for instrucao in boletoDados.instrucoes: tpl_data['instrucoes'] +=", "img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] =", "Outras classes podem ser implementadas no futuro com a mesma interface, para fazer", "# Tamanhos em px self.width = 750 self.widthCanhoto = 0 self.fontSizeTitle = 9", "= tpl.read() return template_content def _load_image(self, logo_image): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) image_path = os.path.join(pyboleto_dir,", "def _grouper(self, n, iterable, fillvalue=None): \"\"\"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx\"\"\" args", "páginas, uma por boleto. :param boletoDados: Objeto com os dados do boleto a", "tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] =", "['n', 'w', 'n', 'w', 'n'], ] class BoletoHTML(object): \"\"\"Geração do Boleto em HTML", "tpl_data['aceite'] = boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['carteira']", "n, iterable, fillvalue=None): \"\"\"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx\"\"\" args = [iter(iterable)]", "# Rodapé tpl_data['sacado_info'] = '' for linha_sacado in boletoDados.sacado: tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado) #", "folha cheia. :param file_descr: Um arquivo ou *file-like* class. :param landscape: Formato da", "# Código de barras tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode) self.html += tpl.substitute(tpl_data) def drawCanhoto(self, html):", "boleto. :param boletoDados: Objeto com os dados do boleto a ser preenchido. Deve", ":license: BSD, see LICENSE for more details. \"\"\" import os import string import", "do Sacado para modelo de página inteira :param boletoDados: Objeto com os dados", "= '' for instrucao in boletoDados.instrucoes: tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info'] =", "do boleto a ser preenchido. Deve ser subclasse de :class:`pyboleto.data.BoletoData` :param boletoDados2: Objeto", "with open(self.fileDescr, 'w') as fd: fd.write(self.html) def _formataValorParaExibir(self, nfloat): if nfloat: txt =", "img = codecs.open(self._load_image(boletoDados.logo_image)) aux = img.read() aux = base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img']", "by <NAME> :license: BSD, see LICENSE for more details. \"\"\" import os import", "uso do zip_longest else: from itertools import zip_longest DIGITS = [ ['n', 'n',", "por imprimir o boleto em HTML. Outras classes podem ser implementadas no futuro", "com 2 boletos por página. :param boletoDados1: Objeto com os dados do boleto", "formato de folha cheia. :param file_descr: Um arquivo ou *file-like* class. :param landscape:", "várias páginas, uma por boleto. 
:param boletoDados: Objeto com os dados do boleto", "logo_image) return image_path def _drawReciboSacado(self, boletoDados): \"\"\"Imprime o Recibo do Sacado para modelo", "from itertools import izip_longest as zip_longest zip_longest # chamando para evitar erro de", "desenvolvimento...') else: tpl = string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title, width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle) def", "unicode): tpl_data['local_pagamento'] = boletoDados.local_pagamento.encode ('utf-8') else: tpl_data['local_pagamento'] = boletoDados.local_pagamento tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente']", "tpl_data['instrucoes'] = '' for instrucao in boletoDados.instrucoes: tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao) # Rodapé tpl_data['sacado_info']", "boletoDados.linha_digitavel # Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') # value em unicode", "aux = base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel']", "uma por boleto. :param boletoDados: Objeto com os dados do boleto a ser", "'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel # Corpo data_vencimento", "boletoDados.codigo_dv_banco # Corpo tpl_data['cedente'] = boletoDados.cedente tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente tpl_data['cedente_documento'] = boletoDados.cedente_documento data_vencimento", "_drawHorizontalCorteLine(self): self.html += '<hr />' def _drawReciboCaixa(self, boletoDados): \"\"\"Imprime o Recibo do Caixa", "= 750 self.widthCanhoto = 0 self.fontSizeTitle = 9 self.heightLine = 27 self.fontSizeValue =", "<gh_stars>0 # -*- coding: utf-8 -*- \"\"\" pyboleto.html ~~~~~~~~~~~~~ Classe Responsável por fazer", "= string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] = '' if boletoDados.logo_image: img", "base64.b64encode(aux) img_base64 = 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco # Corpo tpl_data['cedente']", "'n', 'n s'] if len(code) % 2 != 0: code = '0' +", "tpl_data['valor_documento'] = valor_doc # Instruções tpl_data['instrucoes'] = '' for instrucao in boletoDados.instrucoes: tpl_data['instrucoes']", "'n'], ['n', 'w', 'w', 'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w', 'n',", "Usar ``True`` para boleto tipo carnê. \"\"\" def __init__(self, file_descr, landscape=False): # Tamanhos", "tpl_data['carteira'] = boletoDados.carteira tpl_data['especie'] = boletoDados.especie tpl_data['quantidade'] = boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor']", "boletoDados.quantidade valor = self._formataValorParaExibir(boletoDados.valor) tpl_data['valor'] = valor valor_doc = self._formataValorParaExibir(boletoDados.valor_documento) tpl_data['valor_documento'] = valor_doc", "'n', 'n', 'n', 'w'], ['n', 'w', 'n', 'n', 'w'], ['w', 'w', 'n', 'n',", "boletos por página. 
:param boletoDados1: Objeto com os dados do boleto a ser", ":type boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_sacado.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img']", "!= 0: code = '0' + code for digt1, digt2 in self._grouper(2, code):", "2012 by <NAME> :license: BSD, see LICENSE for more details. \"\"\" import os", "página) ou em formato de folha cheia. :param file_descr: Um arquivo ou *file-like*", "inteira :param boletoDados: Objeto com os dados do boleto a ser preenchido. Deve", "HTML Esta classe é responsável por imprimir o boleto em HTML. Outras classes", "NotImplementedError('Em desenvolvimento...') else: tpl = string.Template(self._load_template('head.html')) self.html = tpl.substitute(title=self.title, width=self.width, font_size_value=self.fontSizeValue, height_line=self.heightLine, font_size_title=self.fontSizeTitle)", "'Boleto bancário' self.fileDescr = file_descr if landscape: raise NotImplementedError('Em desenvolvimento...') else: tpl =", "Demonstrativo tpl_data['demonstrativo'] = '' for dm in boletoDados.demonstrativo: tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm) self.html +=", "= 'data:image/jpeg;base64,{0}'.format(aux) tpl_data['logo_img'] = img_base64 tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco tpl_data['linha_digitavel'] = boletoDados.linha_digitavel # Corpo", "boletoDados1: Objeto com os dados do boleto a ser preenchido. Deve ser subclasse", "tpl.read() return template_content def _load_image(self, logo_image): pyboleto_dir = os.path.dirname(os.path.abspath(__file__)) image_path = os.path.join(pyboleto_dir, 'media',", "iterable, fillvalue=None): \"\"\"grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx\"\"\" args = [iter(iterable)] *", "'n', 'w'], ['w', 'w', 'n', 'n', 'n'], ['n', 'n', 'w', 'n', 'w'], ['w',", "tpl_data['especie_documento'] = boletoDados.especie_documento tpl_data['aceite'] = boletoDados.aceite data_process = boletoDados.data_processamento tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y') tpl_data['nosso_numero_format']", "``True`` para boleto tipo carnê. \"\"\" def __init__(self, file_descr, landscape=False): # Tamanhos em", "pode imprimir boletos em formato de carnê (2 boletos por página) ou em", "chamando para evitar erro de nao uso do zip_longest else: from itertools import", "'r') as tpl: template_content = tpl.read() return template_content def _load_image(self, logo_image): pyboleto_dir =", "boletoDados: :class:`pyboleto.data.BoletoData` \"\"\" tpl = string.Template(self._load_template('recibo_caixa.html')) tpl_data = {} # Cabeçalho tpl_data['logo_img'] =", "else: from itertools import zip_longest DIGITS = [ ['n', 'n', 'w', 'w', 'n'],", "html: self.html += str(html) def printPage(self): self.html += '<script>window.print();</script>' def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None):", "preenchido. 
# -*- coding: utf-8 -*-
"""
    pyboleto.html
    ~~~~~~~~~~~~~

    Class responsible for producing the boleto output as HTML.

    :copyright: © 2012 by <NAME>
    :license: BSD, see LICENSE for more details.

"""
import os
import string
import sys
import codecs
import base64

from itertools import chain

if sys.version_info < (3,):
    from itertools import izip_longest as zip_longest
    zip_longest  # referenced so the import is not flagged as unused
else:
    from itertools import zip_longest

DIGITS = [
    ['n', 'n', 'w', 'w', 'n'],
    ['w', 'n', 'n', 'n', 'w'],
    ['n', 'w', 'n', 'n', 'w'],
    ['w', 'w', 'n', 'n', 'n'],
    ['n', 'n', 'w', 'n', 'w'],
    ['w', 'n', 'w', 'n', 'n'],
    ['n', 'w', 'w', 'n', 'n'],
    ['n', 'n', 'n', 'w', 'w'],
    ['w', 'n', 'n', 'w', 'n'],
    ['n', 'w', 'n', 'w', 'n'],
]


class BoletoHTML(object):
    """HTML boleto generation

    This class is responsible for printing the boleto as HTML. Other classes
    with the same interface may be implemented in the future to produce
    output in LaTeX, etc.

    It can print boletos in booklet ("carnê") format (2 boletos per page) or
    in full-page format.

    :param file_descr: A file or *file-like* object.
    :param landscape: Page layout. Use ``True`` for booklet-style boletos.

    """

    def __init__(self, file_descr, landscape=False):
        # Sizes in px
        self.width = 750
        self.widthCanhoto = 0
        self.fontSizeTitle = 9
        self.heightLine = 27
        self.fontSizeValue = 12
        self.title = 'Boleto bancário'
        self.fileDescr = file_descr

        if landscape:
            raise NotImplementedError('Em desenvolvimento...')
        else:
            tpl = string.Template(self._load_template('head.html'))
            self.html = tpl.substitute(
                title=self.title,
                width=self.width,
                font_size_value=self.fontSizeValue,
                height_line=self.heightLine,
                font_size_title=self.fontSizeTitle,
            )

    def _load_template(self, template):
        pyboleto_dir = os.path.dirname(os.path.abspath(__file__))
        template_path = os.path.join(pyboleto_dir, 'templates', template)
        with open(template_path, 'r') as tpl:
            template_content = tpl.read()
        return template_content

    def _load_image(self, logo_image):
        pyboleto_dir = os.path.dirname(os.path.abspath(__file__))
        image_path = os.path.join(pyboleto_dir, 'media', logo_image)
        return image_path

    def _drawReciboSacado(self, boletoDados):
        """Prints the payer receipt ("Recibo do Sacado") for the full-page layout

        :param boletoDados: Object holding the data of the boleto to be
            filled in. Must be a subclass of :class:`pyboleto.data.BoletoData`
        :type boletoDados: :class:`pyboleto.data.BoletoData`

        """
        tpl = string.Template(self._load_template('recibo_sacado.html'))
        tpl_data = {}

        # Header
        tpl_data['logo_img'] = ''
        if boletoDados.logo_image:
            img = codecs.open(self._load_image(boletoDados.logo_image))
            aux = img.read()
            aux = base64.b64encode(aux)
            img_base64 = 'data:image/jpeg;base64,{0}'.format(aux)
            tpl_data['logo_img'] = img_base64
        tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco

        # Body
        tpl_data['cedente'] = boletoDados.cedente
        tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente
        tpl_data['cedente_documento'] = boletoDados.cedente_documento
        data_vencimento = boletoDados.data_vencimento
        tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y')
        tpl_data['sacado'] = boletoDados.sacado[0]
        tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero()
        tpl_data['numero_documento'] = boletoDados.numero_documento
        data_documento = boletoDados.data_documento
        tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y')
        tpl_data['cedente_endereco'] = boletoDados.cedente_endereco
        valor_doc = self._formataValorParaExibir(boletoDados.valor_documento)
        tpl_data['valor_documento'] = valor_doc

        # Demonstrativo (statement lines)
        tpl_data['demonstrativo'] = ''
        for dm in boletoDados.demonstrativo:
            tpl_data['demonstrativo'] += '<p>{0}</p>'.format(dm)

        self.html += tpl.substitute(tpl_data)

    def _drawHorizontalCorteLine(self):
        self.html += '<hr />'

    def _drawReciboCaixa(self, boletoDados):
        """Prints the bank copy ("Recibo do Caixa")

        :param boletoDados: Object holding the data of the boleto to be
            filled in. Must be a subclass of :class:`pyboleto.data.BoletoData`
        :type boletoDados: :class:`pyboleto.data.BoletoData`

        """
        tpl = string.Template(self._load_template('recibo_caixa.html'))
        tpl_data = {}

        # Header
        tpl_data['logo_img'] = ''
        if boletoDados.logo_image:
            img = codecs.open(self._load_image(boletoDados.logo_image))
            aux = img.read()
            aux = base64.b64encode(aux)
            img_base64 = 'data:image/jpeg;base64,{0}'.format(aux)
            tpl_data['logo_img'] = img_base64
        tpl_data['codigo_dv_banco'] = boletoDados.codigo_dv_banco
        tpl_data['linha_digitavel'] = boletoDados.linha_digitavel

        # Body
        data_vencimento = boletoDados.data_vencimento
        tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y')

        # value comes as unicode from data.py
        if isinstance(boletoDados.local_pagamento, unicode):
            tpl_data['local_pagamento'] = \
                boletoDados.local_pagamento.encode('utf-8')
        else:
            tpl_data['local_pagamento'] = boletoDados.local_pagamento
        tpl_data['cedente'] = boletoDados.cedente
        tpl_data['agencia_conta_cedente'] = boletoDados.agencia_conta_cedente
        data_documento = boletoDados.data_documento
        tpl_data['data_documento'] = data_documento.strftime('%d/%m/%Y')
        tpl_data['numero_documento'] = boletoDados.numero_documento
        tpl_data['especie_documento'] = boletoDados.especie_documento
        tpl_data['aceite'] = boletoDados.aceite
        data_process = boletoDados.data_processamento
        tpl_data['data_processamento'] = data_process.strftime('%d/%m/%Y')
        tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero()
        tpl_data['carteira'] = boletoDados.carteira
        tpl_data['especie'] = boletoDados.especie
        tpl_data['quantidade'] = boletoDados.quantidade
        valor = self._formataValorParaExibir(boletoDados.valor)
        tpl_data['valor'] = valor
        valor_doc = self._formataValorParaExibir(boletoDados.valor_documento)
        tpl_data['valor_documento'] = valor_doc

        # Instructions
        tpl_data['instrucoes'] = ''
        for instrucao in boletoDados.instrucoes:
            tpl_data['instrucoes'] += '<p>{0}</p>'.format(instrucao)

        # Footer
        tpl_data['sacado_info'] = ''
        for linha_sacado in boletoDados.sacado:
            tpl_data['sacado_info'] += '<p>{0}</p>'.format(linha_sacado)

        # Barcode
        tpl_data['barcode'] = self._codigoBarraI25(boletoDados.barcode)

        self.html += tpl.substitute(tpl_data)

    def drawCanhoto(self, html):
        if html:
            self.html += str(html)

    def printPage(self):
        self.html += '<script>window.print();</script>'

    def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None):
        """Prints a booklet-style ("carnê") page with 2 boletos per page.

        :param boletoDados1: Object holding the data of the boleto to be
            filled in. Must be a subclass of :class:`pyboleto.data.BoletoData`
        :param boletoDados2: Object holding the data of the boleto to be
            filled in. Must be a subclass of :class:`pyboleto.data.BoletoData`
        :type boletoDados1: :class:`pyboleto.data.BoletoData`
        :type boletoDados2: :class:`pyboleto.data.BoletoData`

        """
        raise NotImplementedError('Em desenvolvimento')

    def drawBoleto(self, boletoDados):
        """Prints a conventional boleto

        You can call this method several times to build a file with several
        pages, one per boleto.

        :param boletoDados: Object holding the data of the boleto to be
            filled in. Must be a subclass of :class:`pyboleto.data.BoletoData`
        :type boletoDados: :class:`pyboleto.data.BoletoData`

        """
        self._drawReciboSacado(boletoDados)
        self._drawHorizontalCorteLine()
        self._drawReciboCaixa(boletoDados)
        self._drawHorizontalCorteLine()

    def nextPage(self):
        """Forces the start of a new page"""
        self.html += '</div><div class="pagina">'

    def save(self):
        """Finishes the boleto and builds the output file"""
        self.html += '</div></body></html>'
        if hasattr(self.fileDescr, 'write'):
            self.fileDescr.write(self.html)
        else:
            with open(self.fileDescr, 'w') as fd:
                fd.write(self.html)

    def _formataValorParaExibir(self, nfloat):
        if nfloat:
            txt = nfloat
            txt = txt.replace('.', ',')
        else:
            txt = ""
        return txt

    def _codigoBarraI25(self, code):
        """Prints a barcode optimized for boletos

        http://en.wikipedia.org/wiki/Interleaved_2_of_5

        """
        digits = ['n', 'n s', 'n', 'n s']

        if len(code) % 2 != 0:
            code = '0' + code

        for digt1, digt2 in self._grouper(2, code):
            digt1_repr = DIGITS[int(digt1)]
            digt2_repr = map(lambda x: x + ' s', DIGITS[int(digt2)])
            digits.extend(chain(*zip(digt1_repr, digt2_repr)))

        digits.extend(['w', 'n s', 'n'])

        result = []
        for digit in digits:
            result.append('<span class="{0}"></span>'.format(digit))
        return ''.join(result)

    def _grouper(self, n, iterable, fillvalue=None):
        """grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"""
        args = [iter(iterable)] * n
        return zip_longest(fillvalue=fillvalue, *args)
Esta classe pode imprimir boletos em formato", "'n'], ['n', 'w', 'n', 'w', 'n'], ] class BoletoHTML(object): \"\"\"Geração do Boleto em", "'ABCDEFG', 'x') --> ABC DEF Gxx\"\"\" args = [iter(iterable)] * n return zip_longest(fillvalue=fillvalue,", "data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento']", "para boletos http://en.wikipedia.org/wiki/Interleaved_2_of_5 \"\"\" digits = ['n', 'n s', 'n', 'n s'] if", "'w', 'n'], ] class BoletoHTML(object): \"\"\"Geração do Boleto em HTML Esta classe é", "'w'], ['w', 'n', 'n', 'w', 'n'], ['n', 'w', 'n', 'w', 'n'], ] class", "['n', 'w', 'w', 'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w', 'n', 'n',", "def drawBoletoCarneDuplo(self, boletoDados1, boletoDados2=None): \"\"\"Imprime um boleto tipo carnê com 2 boletos por", "= boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento = boletoDados.data_documento tpl_data['data_documento'] =", "in self._grouper(2, code): digt1_repr = DIGITS[int(digt1)] digt2_repr = map(lambda x: x + '", "'w', 'w'], ['w', 'n', 'n', 'w', 'n'], ['n', 'w', 'n', 'w', 'n'], ]", "\"\"\" digits = ['n', 'n s', 'n', 'n s'] if len(code) % 2", "= txt.replace('.', ',') else: txt = \"\" return txt def _codigoBarraI25(self, code): \"\"\"Imprime", "'n', 'n'], ['n', 'w', 'w', 'n', 'n'], ['n', 'n', 'n', 'w', 'w'], ['w',", "boletoDados1: :class:`pyboleto.data.BoletoData` :type boletoDados2: :class:`pyboleto.data.BoletoData` \"\"\" raise NotImplementedError('Em desenvolvimento') def drawBoleto(self, boletoDados): \"\"\"Imprime", "Corpo data_vencimento = boletoDados.data_vencimento tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') # value em unicode em data.py", "tpl_data['data_vencimento'] = data_vencimento.strftime('%d/%m/%Y') tpl_data['sacado'] = boletoDados.sacado[0] tpl_data['nosso_numero_format'] = boletoDados.format_nosso_numero() tpl_data['numero_documento'] = boletoDados.numero_documento data_documento", "nfloat: txt = nfloat txt = txt.replace('.', ',') else: txt = \"\" return", "'n', 'w', 'n', 'n'], ['n', 'w', 'w', 'n', 'n'], ['n', 'n', 'n', 'w',", "interface, para fazer output em LaTeX, etc ... Esta classe pode imprimir boletos" ]
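The row above is shingled from pyboleto's HTML boleto writer (the BoletoHTML class); the barcode-related fragments show how an Interleaved 2 of 5 code is emitted as a run of <span> elements whose CSS classes ('n'/'w' for narrow/wide bars, with an added ' s' for spaces) are drawn by the accompanying template. Below is a minimal Python 3 reconstruction of that step, assuming the standard ITF digit table for the rows not visible in the fragments and using the hypothetical names barcode_i25_html and _grouper in place of the original methods:

    from itertools import chain, zip_longest

    # Narrow/wide patterns for digits 0-9 in Interleaved 2 of 5; rows that do not
    # appear in the fragments above are filled in from the standard ITF spec.
    DIGITS = [
        ['n', 'n', 'w', 'w', 'n'],  # 0
        ['w', 'n', 'n', 'n', 'w'],  # 1
        ['n', 'w', 'n', 'n', 'w'],  # 2
        ['w', 'w', 'n', 'n', 'n'],  # 3
        ['n', 'n', 'w', 'n', 'w'],  # 4
        ['w', 'n', 'w', 'n', 'n'],  # 5
        ['n', 'w', 'w', 'n', 'n'],  # 6
        ['n', 'n', 'n', 'w', 'w'],  # 7
        ['w', 'n', 'n', 'w', 'n'],  # 8
        ['n', 'w', 'n', 'w', 'n'],  # 9
    ]

    def _grouper(n, iterable, fillvalue=None):
        """grouper(2, '1234') --> ('1', '2') ('3', '4')"""
        args = [iter(iterable)] * n
        return zip_longest(*args, fillvalue=fillvalue)

    def barcode_i25_html(code):
        """Render a digit string as Interleaved 2 of 5 <span> elements."""
        digits = ['n', 'n s', 'n', 'n s']        # start pattern
        if len(code) % 2 != 0:
            code = '0' + code                    # ITF encodes digits in pairs
        for digt1, digt2 in _grouper(2, code):
            digt1_repr = DIGITS[int(digt1)]                      # bars
            digt2_repr = [x + ' s' for x in DIGITS[int(digt2)]]  # interleaved spaces
            digits.extend(chain(*zip(digt1_repr, digt2_repr)))
        digits.extend(['w', 'n s', 'n'])         # stop pattern
        return ''.join('<span class="{0}"></span>'.format(d) for d in digits)

For example, barcode_i25_html('0123') produces 27 spans: 4 for the start pattern, 10 for each of the two digit pairs, and 3 for the stop pattern.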
[ "i = 0 while i < 5: summary = summary + i print(summary)", "< 5: summary = summary + i print(summary) i = i + 1", "0 i = 0 while i < 5: summary = summary + i", "= 0 i = 0 while i < 5: summary = summary +", "= 0 while i < 5: summary = summary + i print(summary) i", "i < 5: summary = summary + i print(summary) i = i +", "while i < 5: summary = summary + i print(summary) i = i", "summary = 0 i = 0 while i < 5: summary = summary", "0 while i < 5: summary = summary + i print(summary) i =" ]
[ "import imtreat img = imtreat.imageManagerClass.openImageFunction(\"../images/soleil.png\", 0) img = imtreat.definedModesClass.detailEnhanceFunction(img) imtreat.imageManagerClass.saveImageFunction(\"/Téléchargements/\", \"image_1\", \".png\", img)" ]
[ "of a Windows share. Related options: * \"instances_path\": The directory which will be", "Possible values: * Name of the qemu-img executable, in case it is in", "min=0, help=\"\"\" Wait soft reboot seconds Number of seconds to wait for instance", "Unless required by applicable law or agreed to in writing, software # distributed", "default=[], help=\"\"\" List of iSCSI initiators that will be used for estabilishing iSCSI", "- Windows / Hyper-V Server 2012 R2 [1, 8] - Windows / Hyper-V", "with different CPU features and checked during instance creation in order to limit", "with the following flavor extra specs: **os:resolution**. Guest VM screen resolution size. Acceptable", "Hyper-V, such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=\"\"\" Volume attach retry count", "seconds (Default: 5). Related options: * This options is meaningful when volume_attach_retry_count is", "Apache License, Version 2.0 (the \"License\"); you may # not use this file", "the License. You may obtain # a copy of the License at #", "CPU features and checked during instance creation in order to limit the CPU", "drive (default) or as a CD drive. Related options: * This option is", "* Time interval between attachment attempts is declared with volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval',", "Hyper-V, you must set the ``mkisofs_cmd`` value to the full path to an", "with the License. You may obtain # a copy of the License at", "iSCSI or FC disks. This requires the Multipath IO Windows feature to be", "one DirectX 11 capable graphics adapter for Windows / Hyper-V Server 2012 R2", "to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject password to config drive. When enabled,", "values (Default: 10). Related options: * Time interval between attachment attempts is declared", "options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power state check timeframe The timeframe to", "target host. If left blank, an administrative share (hidden network share) will be", "config drive as a CD drive. OpenStack can be configured to write instance", "qemu-img command qemu-img is required for some of the image related operations like", "image. Related options: * This option is meaningful when used with other options", "64, 128, 256, 512, 1024 \"\"\"), cfg.BoolOpt('use_multipath_io', default=False, help=\"\"\" Use multipath connections when", "using Hyper-V's metric APIs. Collected data can be retrieved by other apps and", "/ Hyper-V Server 2016 **os:vram**. Guest VM VRAM amount. Only available on Windows", "will be used (Default). * Name of a Windows share. Related options: *", "\"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject password to config drive. When enabled, the admin", "options is meaningful when volume_attach_retry_count is greater than 1. * The retry loop", "the specified timeframe. Possible values: * Timeframe in seconds (Default: 60). \"\"\"), cfg.IntOpt('power_state_event_polling_interval',", "the mounted_disk_query_retry_count is greater than 1. * The retry loop runs with mounted_disk_query_retry_count", "the physical network. In addition, Hyper-V Virtual Switch provides policy enforcement for security,", "Related options: * This option is meaningful with ``force_config_drive`` option set to ``True``", "be enabled. MPIO must be configured to claim such devices. 
\"\"\"), cfg.ListOpt('iscsi_initiator_list', default=[],", "help=\"\"\" Volume attach retry interval Interval between volume attachment attempts, in seconds. Possible", "you may # not use this file except in compliance with the License.", "**os:resolution**. Guest VM screen resolution size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600,", "meaningful with ``force_config_drive`` option set to ``True`` or when the REST API call", "attach retry count The number of times to retry attaching a volume. Volume", "is meaningful when volume_attach_retry_count is greater than 1. * The retry loop runs", "assigned to an instance and its startup RAM amount. For example a ratio", "* Positive integer values (Default: 10). Related options: * Time interval between attachment", "values: * Time in seconds (Default: 5). Related options: * This option is", "limit the CPU features used by the instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\"", "drive as the config drive image. * To use config drive with Hyper-V,", "number of times to retry checking for a mounted disk. The query runs", "within the specified timeframe. Possible values: * Timeframe in seconds (Default: 60). \"\"\"),", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "the full path to an ``mkisofs.exe`` installation. \"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch", "options: * This option is meaningful when used with other options that enable", "``mkisofs.exe`` installation. \"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch name The Hyper-V Virtual Switch", "be available from the config drive image. Related options: * This option is", "for an instance by using Hyper-V's metric APIs. Collected data can be retrieved", "* Virtual switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft reboot seconds", "between the total RAM assigned to an instance and its startup RAM amount.", "this value for startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable instance metrics collection Enables", "the ``force_config_drive`` option to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject password to config", "\"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power state check timeframe The timeframe to be", "service or its path is in the PATH environment variable (Default). * Path", "to an instance and its startup RAM amount. For example a ratio of", "events to the given value. This option enhances the internal lifecycle notifications of", "with volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0, help=\"\"\" Volume attach retry interval Interval", "otherwise the config drive will remain an ISO. To use config drive with", "has to change this value. Possible values: * Time in seconds (Default: 2).", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "1.0: Disables dynamic memory allocation (Default). * Float values greater than 1.0: Enables", "value to the full path to an ``mkisofs.exe`` installation. \"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External", "change this value. Possible values: * Time in seconds (Default: 2). 
\"\"\"), cfg.StrOpt('qemu_img_cmd',", "min=0, help=\"\"\" Volume attach retry count The number of times to retry attaching", "min=0, help=\"\"\" Power state check timeframe The timeframe to be checked for instance", "You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "must set the ``qemu_img_cmd`` value to the full path to an ``qemu-img`` command", "timeframe to be checked for instance power state changes. This option is used", "* Time in seconds (Default: 60). \"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount config drive", "of available vswitches is used. This list is queried using WQL. * Virtual", "values greater than 1.0: Enables allocation of total implied RAM divided by this", "the \"instances_path\" dir and used by the resize feature to copy files to", "a config drive, which is then attached to the instance before it boots.", "will remain an ISO. To use config drive with Hyper-V, you must set", "following flavor extra specs: **os:resolution**. Guest VM screen resolution size. Acceptable values:: 1024x768,", "queried using WQL. * Virtual switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait", "config drive. When enabled, the admin password will be available from the config", "External virtual switch name The Hyper-V Virtual Switch is a software-based layer-2 Ethernet", "is queried using WQL. * Virtual switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\"", "* Float values greater than 1.0: Enables allocation of total implied RAM divided", "* Positive integer values. Values greater than 1 is recommended (Default: 10). Related", "this window. Possible values: * Time in seconds (Default: 60). \"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False,", "query runs until the device can be found or the retry count is", "success or the given retry count is reached. Possible values: * Positive integer", "option to the default value. Possible values: * Name of the qemu-img executable,", "Hyper-V feature', help=\"\"\" The hyperv feature allows you to configure the Hyper-V hypervisor", "option is meaningful when used with other options that enable config drive usage", "recommended (Default: 10). Related options: * Time interval between disk mount retries is", "\"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft reboot seconds Number of seconds to", "are specified, the Microsoft iSCSI initiator service will choose the initiator. \"\"\") ]", "hyperv feature allows you to configure the Hyper-V hypervisor driver to be used", "attaching a volume. Volume attachment is retried until success or the given retry", "size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160 ``3840x2160`` is only available", "the instance before it boots. The config drive can be attached as a", "within this window. Possible values: * Time in seconds (Default: 60). \"\"\"), cfg.BoolOpt('config_drive_cdrom',", "retry loop runs with mounted_disk_query_retry_count and mounted_disk_query_retry_interval configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0,", "switch that is available with the installation of the Hyper-V server role. The", "* This options is meaningful when volume_attach_retry_count is greater than 1. * The", "deployment. 
\"\"\") hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio Enables dynamic", "Server 2012 R2 or newer and RDS-Virtualization feature has to be enabled. Instances", "installation of the Hyper-V server role. The switch includes programmatically managed and extensible", "an OpenStack deployment. \"\"\") hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio", "Virtual switch name. \"\"\"), cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft reboot seconds Number", "configuration drive by setting the ``force_config_drive`` option to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\"", "files to the target host. If left blank, an administrative share (hidden network", "is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features This flag is", "\"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\" Mounted disk query retry count The number of", "PATH environment variable (Default). * Path of qemu-img command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: *", "5). Related options: * This option is meaningful when the mounted_disk_query_retry_count is greater", "help=\"\"\" Mounted disk query retry interval Interval between checks for a mounted disk,", "Possible values: * Time in seconds (Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img", "attachment attempts is declared with volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0, help=\"\"\" Volume", "instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\" Mounted disk query retry count The number", "left blank, an administrative share (hidden network share) will be used, looking for", "2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img command qemu-img is required for some of", "Volume attach retry count The number of times to retry attaching a volume.", "is then attached to the instance before it boots. The config drive can", "if this option here is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU", "drive. When enabled, the admin password will be available from the config drive", "seconds (Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\" qemu-img command qemu-img is required for", "than 1.0: Enables allocation of total implied RAM divided by this value for", "cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\" Mounted disk query retry count The number of times", "a CD drive. OpenStack can be configured to write instance metadata to a", "allows you to configure the Hyper-V hypervisor driver to be used within an", "to the full path to an ``mkisofs.exe`` installation. \"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External virtual", "in the PATH environment variable and leave this option to the default value.", "of monitors. Acceptable values:: [1, 4] - Windows / Hyper-V Server 2012 R2", "the CPU features used by the instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\" Mounted", "always create a configuration drive by setting the ``force_config_drive`` option to ``True``. 
\"\"\"),", "of qemu-img.exe or set its path in the PATH environment variable and leave", "notifications of instances that reboot themselves. It is unlikely that an operator has", "agreed to in writing, software # distributed under the License is distributed on", "Inject password to config drive. When enabled, the admin password will be available", "help=\"\"\" Inject password to config drive. When enabled, the admin password will be", "given retry count is reached. Possible values: * Positive integer values (Default: 10).", "cfg.BoolOpt('use_multipath_io', default=False, help=\"\"\" Use multipath connections when attaching iSCSI or FC disks. This", "configure the Compute service to always create a configuration drive by setting the", "the ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe`` installation. Additionally, you", "initiator service will choose the initiator. \"\"\") ] def register_opts(conf): conf.register_group(hyperv_opt_group) conf.register_opts(hyperv_opts, group=hyperv_opt_group)", "isolation, and service levels. The vSwitch represented by this config option must be", "values: * Timeframe in seconds (Default: 60). \"\"\"), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0, help=\"\"\" Power", "(c) 2016 <NAME> # All Rights Reserved. # # Licensed under the Apache", "value. Possible values: * Time in seconds (Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\", help=\"\"\"", "Volume attach retry interval Interval between volume attachment attempts, in seconds. Possible values:", "be enabled. Instances with RemoteFX can be requested with the following flavor extra", "to claim such devices. \"\"\"), cfg.ListOpt('iscsi_initiator_list', default=[], help=\"\"\" List of iSCSI initiators that", "iSCSI initiators that will be used for estabilishing iSCSI sessions. If none are", "iSCSI sessions. If none are specified, the Microsoft iSCSI initiator service will choose", "Power state check timeframe The timeframe to be checked for instance power state", "help=\"\"\" Use multipath connections when attaching iSCSI or FC disks. This requires the", "It is unlikely that an operator has to change this value. Possible values:", "to configure the Hyper-V hypervisor driver to be used within an OpenStack deployment.", "/ Hyper-V Server 2012 R2 or newer and RDS-Virtualization feature has to be", "on Windows / Hyper-V Server 2016. **os:monitors**. Guest VM number of monitors. Acceptable", "network share) will be used, looking for the same \"instances_path\" used locally. Possible", "is reached. Possible values: * Positive integer values. Values greater than 1 is", "for an instance with 1024MB of RAM implies 512MB of RAM allocated at", "\"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted disk query retry interval Interval between checks", "<NAME> # All Rights Reserved. # # Licensed under the Apache License, Version", "by setting the ``force_config_drive`` option to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject password", "to in writing, software # distributed under the License is distributed on an", "1. * The retry loop runs with mounted_disk_query_retry_count and mounted_disk_query_retry_interval configuration options. \"\"\"),", "Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "Possible values: * Time in seconds (Default: 60). 
\"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount", "the first of a list of available vswitches is used. This list is", "value to the full path to an ``mkisofs.exe`` installation. Additionally, you must set", "under the License. from oslo_config import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\", title='The Hyper-V feature',", "VM screen resolution size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160 ``3840x2160``", "RAM amount. For example a ratio of 2.0 for an instance with 1024MB", "drive. OpenStack can be configured to write instance metadata to a config drive,", "must set the ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe`` installation.", "timeframe. Possible values: * Timeframe in seconds (Default: 60). \"\"\"), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0,", "The directory which will be used if this option here is left blank.", "found or the retry count is reached. Possible values: * Positive integer values.", "= [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio Enables dynamic memory allocation (ballooning)", "2.0 for an instance with 1024MB of RAM implies 512MB of RAM allocated", "is retried until success or the given retry count is reached. Possible values:", "disk. The query runs until the device can be found or the retry", "disks. This requires the Multipath IO Windows feature to be enabled. MPIO must", "required for some of the image related operations like converting between different image", "values:: [1, 4] - Windows / Hyper-V Server 2012 R2 [1, 8] -", "command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: * If the config_drive_cdrom option is False, qemu-img will", "request is made. We fall back to hard reboot if instance does not", "Guest VM screen resolution size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160", "\"License\"); you may # not use this file except in compliance with the", "volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0, help=\"\"\" Volume attach retry interval Interval between", "number of monitors. Acceptable values:: [1, 4] - Windows / Hyper-V Server 2012", "with 1024MB of RAM implies 512MB of RAM allocated at startup. Possible values:", "service to always create a configuration drive by setting the ``force_config_drive`` option to", "within an OpenStack deployment. \"\"\") hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory", "to convert the ISO to a VHD, otherwise the config drive will remain", "give the full path of qemu-img.exe or set its path in the PATH", "* \"\": An administrative share will be used (Default). * Name of a", "drive. Related options: * This option is meaningful with ``force_config_drive`` option set to", "not use this file except in compliance with the License. You may obtain", "shutdown within this window. Possible values: * Time in seconds (Default: 60). \"\"\"),", "operator has to change this value. Possible values: * Time in seconds (Default:", "here: (http://qemu.weilnetz.de/) or you can install the Cloudbase OpenStack Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/)", "operations like converting between different image types. You can get it from here:", "options. \"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\" Enable RemoteFX feature This requires at least one", "10). 
Related options: * Time interval between disk mount retries is declared with", "config drive can be attached as a disk drive (default) or as a", "This flag is needed to support live migration to hosts with different CPU", "is meaningful when used with other options that enable config drive usage with", "* Time in seconds (Default: 5). Related options: * This options is meaningful", "drive by setting the ``force_config_drive`` option to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject", "greater than 1. * The retry loop runs with volume_attach_retry_count and volume_attach_retry_interval configuration", "values: * Time in seconds (Default: 60). \"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount config", "Windows / Hyper-V Server 2016 **os:vram**. Guest VM VRAM amount. Only available on", "apps and services, e.g.: Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances path share The", "seconds to wait for instance to shut down after soft reboot request is", "* Name of a Windows share. Related options: * \"instances_path\": The directory which", "machines to both virtual networks and the physical network. In addition, Hyper-V Virtual", "or you can install the Cloudbase OpenStack Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically", "the given value. This option enhances the internal lifecycle notifications of instances that", "value. Possible values: * Name of the qemu-img executable, in case it is", "Disables dynamic memory allocation (Default). * Float values greater than 1.0: Enables allocation", "convert the ISO to a VHD, otherwise the config drive will remain an", "Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances path share The name of a Windows", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the", "feature', help=\"\"\" The hyperv feature allows you to configure the Hyper-V hypervisor driver", "an instance and its startup RAM amount. For example a ratio of 2.0", "in the PATH environment variable (Default). * Path of qemu-img command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related", "config drive, which is then attached to the instance before it boots. The", "This option is meaningful when the mounted_disk_query_retry_count is greater than 1. * The", "help=\"\"\" Enable RemoteFX feature This requires at least one DirectX 11 capable graphics", "claim such devices. \"\"\"), cfg.ListOpt('iscsi_initiator_list', default=[], help=\"\"\" List of iSCSI initiators that will", "some of the image related operations like converting between different image types. You", "is used to fetch the state of the instance from Hyper-V through the", "attach retry interval Interval between volume attachment attempts, in seconds. Possible values: *", "Hyper-V Server 2016 **os:vram**. Guest VM VRAM amount. Only available on Windows /", "of a list of available vswitches is used. This list is queried using", "cfg.IntOpt('power_state_event_polling_interval', default=2, min=0, help=\"\"\" Power state event polling interval Instance power state change", "qemu-img command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: * If the config_drive_cdrom option is False, qemu-img", "available with the installation of the Hyper-V server role. 
The switch includes programmatically", "capabilities to connect virtual machines to both virtual networks and the physical network.", "* This option is meaningful with ``force_config_drive`` option set to ``True`` or when", "used to fetch the state of the instance from Hyper-V through the WMI", "set to ``True`` or when the REST API call to create an instance", "enable config drive usage with Hyper-V, such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10, min=0,", "after soft reboot request is made. We fall back to hard reboot if", "feature This requires at least one DirectX 11 capable graphics adapter for Windows", "flag is needed to support live migration to hosts with different CPU features", "features This flag is needed to support live migration to hosts with different", "must be an external one (not internal or private). Possible values: * If", "values: * Time in seconds (Default: 5). Related options: * This options is", "full path to an ``qemu-img`` command installation. * You can configure the Compute", "the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "4] - Windows / Hyper-V Server 2012 R2 [1, 8] - Windows /", "limitations # under the License. from oslo_config import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\", title='The", "this option to the default value. Possible values: * Name of the qemu-img", "1600x1200, 1920x1200, 2560x1600, 3840x2160 ``3840x2160`` is only available on Windows / Hyper-V Server", "on Windows / Hyper-V Server 2016. Acceptable values:: 64, 128, 256, 512, 1024", "as a CD drive. OpenStack can be configured to write instance metadata to", "Mounted disk query retry count The number of times to retry checking for", "IO Windows feature to be enabled. MPIO must be configured to claim such", "connections when attaching iSCSI or FC disks. This requires the Multipath IO Windows", "instance metadata to a config drive, which is then attached to the instance", "Server 2012 R2 [1, 8] - Windows / Hyper-V Server 2016 **os:vram**. Guest", "is meaningful with ``force_config_drive`` option set to ``True`` or when the REST API", "the instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\" Mounted disk query retry count The", "implied RAM divided by this value for startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable", "volume. Volume attachment is retried until success or the given retry count is", "resize feature to copy files to the target host. If left blank, an", "# All Rights Reserved. # # Licensed under the Apache License, Version 2.0", "governing permissions and limitations # under the License. from oslo_config import cfg hyperv_opt_group", "RAM divided by this value for startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable instance", "options: * Time interval between disk mount retries is declared with \"mounted_disk_query_retry_interval\" option.", "Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the proper path for this config", "reboot request is made. We fall back to hard reboot if instance does", "is False, qemu-img will be used to convert the ISO to a VHD,", "in seconds (Default: 5). Related options: * This options is meaningful when volume_attach_retry_count", "the device can be found or the retry count is reached. Possible values:", "Collected data can be retrieved by other apps and services, e.g.: Ceilometer. 
\"\"\"),", "express or implied. See the # License for the specific language governing permissions", "License. from oslo_config import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\", title='The Hyper-V feature', help=\"\"\" The", "for a mounted disk, in seconds. Possible values: * Time in seconds (Default:", "drive as a CD drive. OpenStack can be configured to write instance metadata", "with \"mounted_disk_query_retry_interval\" option. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted disk query retry interval", "[1, 4] - Windows / Hyper-V Server 2012 R2 [1, 8] - Windows", "to be checked for instance power state changes. This option is used to", "is available with the installation of the Hyper-V server role. The switch includes", "\"mounted_disk_query_retry_interval\" option. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted disk query retry interval Interval", "Volume attachment is retried until success or the given retry count is reached.", "If not provided, the first of a list of available vswitches is used.", "unlikely that an operator has to change this value. Possible values: * Time", "creation in order to limit the CPU features used by the instance. \"\"\"),", "from oslo_config import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\", title='The Hyper-V feature', help=\"\"\" The hyperv", "default=\"qemu-img.exe\", help=\"\"\" qemu-img command qemu-img is required for some of the image related", "of instances that reboot themselves. It is unlikely that an operator has to", "different CPU features and checked during instance creation in order to limit the", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "the WMI interface, within the specified timeframe. Possible values: * Timeframe in seconds", "expresses the ratio between the total RAM assigned to an instance and its", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "power state events to the given value. This option enhances the internal lifecycle", "\"\"\") hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio Enables dynamic memory", "For example a ratio of 2.0 for an instance with 1024MB of RAM", "metrics collections for an instance by using Hyper-V's metric APIs. Collected data can", "wait for instance to shut down after soft reboot request is made. We", "allocation (Default). * Float values greater than 1.0: Enables allocation of total implied", "path to an ``qemu-img`` command installation. * You can configure the Compute service", "either express or implied. See the # License for the specific language governing", "help=\"\"\" Dynamic memory ratio Enables dynamic memory allocation (ballooning) when set to a", "* Name of the qemu-img executable, in case it is in the same", "count The number of times to retry attaching a volume. Volume attachment is", "drive can be attached as a disk drive (default) or as a CD", "as a disk drive (default) or as a CD drive. Related options: *", "``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False, help=\"\"\" Inject password to config drive. When enabled, the", "1. * The retry loop runs with volume_attach_retry_count and volume_attach_retry_interval configuration options. \"\"\"),", "The query runs until the device can be found or the retry count", "2012 R2 or newer and RDS-Virtualization feature has to be enabled. 
Instances with", "Instances with RemoteFX can be requested with the following flavor extra specs: **os:resolution**.", "between different image types. You can get it from here: (http://qemu.weilnetz.de/) or you", "help=\"\"\" Enable instance metrics collection Enables metrics collections for an instance by using", "configuration options. \"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\" Enable RemoteFX feature This requires at least", "than 1. * The retry loop runs with mounted_disk_query_retry_count and mounted_disk_query_retry_interval configuration options.", "mount retries is declared with \"mounted_disk_query_retry_interval\" option. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted", "Related options: * This option is meaningful when used with other options that", "If left blank, an administrative share (hidden network share) will be used, looking", "an instance with 1024MB of RAM implies 512MB of RAM allocated at startup.", "values: * If not provided, the first of a list of available vswitches", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "path share The name of a Windows share mapped to the \"instances_path\" dir", "set to a value greater than 1. The value expresses the ratio between", "(Default: 10). Related options: * Time interval between attachment attempts is declared with", "by this value for startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable instance metrics collection", "which will be used if this option here is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features',", "OR CONDITIONS OF ANY KIND, either express or implied. See the # License", "obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "be used, looking for the same \"instances_path\" used locally. Possible values: * \"\":", "to copy files to the target host. If left blank, an administrative share", "Switch provides policy enforcement for security, isolation, and service levels. The vSwitch represented", "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may #", "VRAM amount. Only available on Windows / Hyper-V Server 2016. Acceptable values:: 64,", "VM VRAM amount. Only available on Windows / Hyper-V Server 2016. Acceptable values::", "specified, the Microsoft iSCSI initiator service will choose the initiator. \"\"\") ] def", "when attaching iSCSI or FC disks. This requires the Multipath IO Windows feature", "count is reached. Possible values: * Positive integer values (Default: 10). Related options:", "will be used if this option here is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False,", "Ethernet network switch that is available with the installation of the Hyper-V server", "disk query retry count The number of times to retry checking for a", "RAM assigned to an instance and its startup RAM amount. For example a", "feature to copy files to the target host. If left blank, an administrative", "a CD drive. Related options: * This option is meaningful with ``force_config_drive`` option", "option here is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features This", "``mkisofs.exe`` installation. Additionally, you must set the ``qemu_img_cmd`` value to the full path", "networks and the physical network. In addition, Hyper-V Virtual Switch provides policy enforcement", "Multipath IO Windows feature to be enabled. 
MPIO must be configured to claim", "capable graphics adapter for Windows / Hyper-V Server 2012 R2 or newer and", "may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "and limitations # under the License. from oslo_config import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\",", "other apps and services, e.g.: Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances path share", "the config_drive_cdrom option is False, qemu-img will be used to convert the ISO", "to retry attaching a volume. Volume attachment is retried until success or the", "and volume_attach_retry_interval configuration options. \"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\" Enable RemoteFX feature This requires", "to change this value. Possible values: * Time in seconds (Default: 2). \"\"\"),", "/ Hyper-V Server 2016. Acceptable values:: 64, 128, 256, 512, 1024 \"\"\"), cfg.BoolOpt('use_multipath_io',", "this value. Possible values: * Time in seconds (Default: 2). \"\"\"), cfg.StrOpt('qemu_img_cmd', default=\"qemu-img.exe\",", "metric APIs. Collected data can be retrieved by other apps and services, e.g.:", "options: * Time interval between attachment attempts is declared with volume_attach_retry_interval option. \"\"\"),", "* If not provided, the first of a list of available vswitches is", "help=\"\"\" External virtual switch name The Hyper-V Virtual Switch is a software-based layer-2", "interval between attachment attempts is declared with volume_attach_retry_interval option. \"\"\"), cfg.IntOpt('volume_attach_retry_interval', default=5, min=0,", "full path of qemu-img.exe or set its path in the PATH environment variable", "or when the REST API call to create an instance will have ``--config-drive=True``", "Name of the qemu-img executable, in case it is in the same directory", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "Virtual Switch is a software-based layer-2 Ethernet network switch that is available with", "retry interval Interval between checks for a mounted disk, in seconds. Possible values:", "you must set the ``qemu_img_cmd`` value to the full path to an ``qemu-img``", "(not internal or private). Possible values: * If not provided, the first of", "* Time interval between disk mount retries is declared with \"mounted_disk_query_retry_interval\" option. \"\"\"),", "here is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features This flag", "directory as the nova-compute service or its path is in the PATH environment", "Related options: * This options is meaningful when volume_attach_retry_count is greater than 1.", "to an ``mkisofs.exe`` installation. Additionally, you must set the ``qemu_img_cmd`` value to the", "You can get it from here: (http://qemu.weilnetz.de/) or you can install the Cloudbase", "virtual switch name The Hyper-V Virtual Switch is a software-based layer-2 Ethernet network", "retry count is reached. Possible values: * Positive integer values (Default: 10). Related", "volume_attach_retry_count is greater than 1. * The retry loop runs with volume_attach_retry_count and", "of qemu-img command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: * If the config_drive_cdrom option is False,", "* This option is meaningful when the mounted_disk_query_retry_count is greater than 1. *", "count is reached. Possible values: * Positive integer values. 
Values greater than 1", "is unlikely that an operator has to change this value. Possible values: *", "All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "config drive image. Related options: * This option is meaningful when used with", "You can configure the Compute service to always create a configuration drive by", "Positive integer values. Values greater than 1 is recommended (Default: 10). Related options:", "Enable RemoteFX feature This requires at least one DirectX 11 capable graphics adapter", "is in the same directory as the nova-compute service or its path is", "CD drive. Related options: * This option is meaningful with ``force_config_drive`` option set", "Server 2016 **os:vram**. Guest VM VRAM amount. Only available on Windows / Hyper-V", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "memory ratio Enables dynamic memory allocation (ballooning) when set to a value greater", "state event polling interval Instance power state change event polling frequency. Sets the", "instance power state changes. This option is used to fetch the state of", "and used by the resize feature to copy files to the target host.", "is a software-based layer-2 Ethernet network switch that is available with the installation", "qemu-img.exe or set its path in the PATH environment variable and leave this", "for a mounted disk. The query runs until the device can be found", "Hyper-V through the WMI interface, within the specified timeframe. Possible values: * Timeframe", "path to an ``mkisofs.exe`` installation. \"\"\"), cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch name The", "applicable law or agreed to in writing, software # distributed under the License", "option is meaningful with ``force_config_drive`` option set to ``True`` or when the REST", "mounted disk. The query runs until the device can be found or the", "full path to an ``mkisofs.exe`` installation. Additionally, you must set the ``qemu_img_cmd`` value", "fetch the state of the instance from Hyper-V through the WMI interface, within", "power state changes. This option is used to fetch the state of the", "the License. from oslo_config import cfg hyperv_opt_group = cfg.OptGroup(\"hyperv\", title='The Hyper-V feature', help=\"\"\"", "the config drive will remain an ISO. To use config drive with Hyper-V,", "newer and RDS-Virtualization feature has to be enabled. Instances with RemoteFX can be", "the full path to an ``mkisofs.exe`` installation. Additionally, you must set the ``qemu_img_cmd``", "To use config drive with Hyper-V, you must set the ``mkisofs_cmd`` value to", "a software-based layer-2 Ethernet network switch that is available with the installation of", "same directory as the nova-compute service or its path is in the PATH", "and the physical network. In addition, Hyper-V Virtual Switch provides policy enforcement for", "get it from here: (http://qemu.weilnetz.de/) or you can install the Cloudbase OpenStack Hyper-V", "of seconds to wait for instance to shut down after soft reboot request", "1. The value expresses the ratio between the total RAM assigned to an", "to connect virtual machines to both virtual networks and the physical network. In", "values: * Time in seconds (Default: 2). 
# nova/conf/hyperv.py
#
# Copyright (c) 2016 <NAME>
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from oslo_config import cfg

hyperv_opt_group = cfg.OptGroup("hyperv",
    title='The Hyper-V feature',
    help="""
The hyperv feature allows you to configure the Hyper-V hypervisor driver
to be used within an OpenStack deployment.
""")

hyperv_opts = [
    cfg.FloatOpt('dynamic_memory_ratio',
        default=1.0,
        help="""
Dynamic memory ratio

Enables dynamic memory allocation (ballooning) when set to a value
greater than 1. The value expresses the ratio between the total RAM
assigned to an instance and its startup RAM amount. For example, a
ratio of 2.0 for an instance with 1024MB of RAM implies 512MB of RAM
allocated at startup.

Possible values:

* 1.0: Disables dynamic memory allocation (Default).
* Float values greater than 1.0: Enables allocation of total implied
  RAM divided by this value for startup.
"""),
    cfg.BoolOpt('enable_instance_metrics_collection',
        default=False,
        help="""
Enable instance metrics collection

Enables metrics collection for an instance by using Hyper-V's metric
APIs. Collected data can be retrieved by other apps and services,
e.g.: Ceilometer.
"""),
    cfg.StrOpt('instances_path_share',
        default="",
        help="""
Instances path share

The name of a Windows share mapped to the "instances_path" dir and
used by the resize feature to copy files to the target host. If left
blank, an administrative share (hidden network share) will be used,
looking for the same "instances_path" used locally.

Possible values:

* "": An administrative share will be used (Default).
* Name of a Windows share.

Related options:

* "instances_path": The directory which will be used if this option
  here is left blank.
"""),
    cfg.BoolOpt('limit_cpu_features',
        default=False,
        help="""
Limit CPU features

This flag is needed to support live migration to hosts with different
CPU features and is checked during instance creation in order to limit
the CPU features used by the instance.
"""),
    cfg.IntOpt('mounted_disk_query_retry_count',
        default=10,
        min=0,
        help="""
Mounted disk query retry count

The number of times to retry checking for a mounted disk. The query
runs until the device can be found or the retry count is reached.

Possible values:

* Positive integer values. Values greater than 1 are recommended
  (Default: 10).

Related options:

* The time interval between disk mount retries is declared with the
  "mounted_disk_query_retry_interval" option.
"""),
    cfg.IntOpt('mounted_disk_query_retry_interval',
        default=5,
        min=0,
        help="""
Mounted disk query retry interval

Interval between checks for a mounted disk, in seconds.

Possible values:

* Time in seconds (Default: 5).

Related options:

* This option is meaningful when mounted_disk_query_retry_count is
  greater than 1.
* The retry loop runs with the mounted_disk_query_retry_count and
  mounted_disk_query_retry_interval configuration options.
"""),
    cfg.IntOpt('power_state_check_timeframe',
        default=60,
        min=0,
        help="""
Power state check timeframe

The timeframe to be checked for instance power state changes. This
option is used to fetch the state of the instance from Hyper-V
through the WMI interface, within the specified timeframe.

Possible values:

* Timeframe in seconds (Default: 60).
"""),
    cfg.IntOpt('power_state_event_polling_interval',
        default=2,
        min=0,
        help="""
Power state event polling interval

Instance power state change event polling frequency. Sets the
listener interval for power state events to the given value. This
option enhances the internal lifecycle notifications of instances
that reboot themselves. It is unlikely that an operator has to
change this value.

Possible values:

* Time in seconds (Default: 2).
"""),
    cfg.StrOpt('qemu_img_cmd',
        default="qemu-img.exe",
        help="""
qemu-img command

qemu-img is required for some of the image related operations like
converting between different image types. You can get it from here:
(http://qemu.weilnetz.de/) or you can install the Cloudbase OpenStack
Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/)
which automatically sets the proper path for this config option. You
can either give the full path of qemu-img.exe or set its path in the
PATH environment variable and leave this option at the default value.

Possible values:

* Name of the qemu-img executable, in case it is in the same directory
  as the nova-compute service or its path is in the PATH environment
  variable (Default).
* Path of the qemu-img command (DRIVELETTER:\PATH\TO\QEMU-IMG\COMMAND).

Related options:

* If the config_drive_cdrom option is False, qemu-img will be used to
  convert the ISO to a VHD, otherwise the config drive will remain an
  ISO. To use config drive with Hyper-V, you must set the
  ``mkisofs_cmd`` value to the full path to an ``mkisofs.exe``
  installation.
"""),
    cfg.StrOpt('vswitch_name',
        help="""
External virtual switch name

The Hyper-V Virtual Switch is a software-based layer-2 Ethernet
network switch that is available with the installation of the Hyper-V
server role. The switch includes programmatically managed and
extensible capabilities to connect virtual machines to both virtual
networks and the physical network. In addition, Hyper-V Virtual
Switch provides policy enforcement for security, isolation, and
service levels. The vSwitch represented by this config option must be
an external one (not internal or private).

Possible values:

* If not provided, the first of a list of available vswitches is used.
  This list is queried using WQL.
* Virtual switch name.
"""),
    cfg.IntOpt('wait_soft_reboot_seconds',
        default=60,
        min=0,
        help="""
Wait soft reboot seconds

Number of seconds to wait for an instance to shut down after a soft
reboot request is made. We fall back to hard reboot if the instance
does not shut down within this window.

Possible values:

* Time in seconds (Default: 60).
"""),
    cfg.BoolOpt('config_drive_cdrom',
        default=False,
        help="""
Mount config drive as a CD drive.

OpenStack can be configured to write instance metadata to a config
drive, which is then attached to the instance before it boots. The
config drive can be attached as a disk drive (default) or as a CD
drive.

Related options:

* This option is meaningful with the ``force_config_drive`` option set
  to ``True`` or when the REST API call to create an instance has the
  ``--config-drive=True`` flag.
* The ``config_drive_format`` option must be set to ``iso9660`` in
  order to use a CD drive as the config drive image.
* To use config drive with Hyper-V, you must set the ``mkisofs_cmd``
  value to the full path to an ``mkisofs.exe`` installation.
  Additionally, you must set the ``qemu_img_cmd`` value to the full
  path to a ``qemu-img`` command installation.
* You can configure the Compute service to always create a
  configuration drive by setting the ``force_config_drive`` option to
  ``True``.
"""),
    cfg.BoolOpt('config_drive_inject_password',
        default=False,
        help="""
Inject password to config drive.

When enabled, the admin password will be available from the config
drive image.

Related options:

* This option is meaningful when used with other options that enable
  config drive usage with Hyper-V, such as ``force_config_drive``.
"""),
    cfg.IntOpt('volume_attach_retry_count',
        default=10,
        min=0,
        help="""
Volume attach retry count

The number of times to retry attaching a volume. Volume attachment is
retried until success or the given retry count is reached.

Possible values:

* Positive integer values (Default: 10).

Related options:

* The time interval between attachment attempts is declared with the
  volume_attach_retry_interval option.
"""),
    cfg.IntOpt('volume_attach_retry_interval',
        default=5,
        min=0,
        help="""
Volume attach retry interval

Interval between volume attachment attempts, in seconds.

Possible values:

* Time in seconds (Default: 5).

Related options:

* This option is meaningful when volume_attach_retry_count is greater
  than 1.
* The retry loop runs with the volume_attach_retry_count and
  volume_attach_retry_interval configuration options.
"""),
    cfg.BoolOpt('enable_remotefx',
        default=False,
        help="""
Enable RemoteFX feature

This requires at least one DirectX 11 capable graphics adapter for
Windows / Hyper-V Server 2012 R2 or newer, and the RDS-Virtualization
feature has to be enabled.

Instances with RemoteFX can be requested with the following flavor
extra specs:

**os:resolution**. Guest VM screen resolution size. Acceptable values::

    1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160

``3840x2160`` is only available on Windows / Hyper-V Server 2016.

**os:monitors**. Guest VM number of monitors. Acceptable values::

    [1, 4] - Windows / Hyper-V Server 2012 R2
    [1, 8] - Windows / Hyper-V Server 2016

**os:vram**. Guest VM VRAM amount. Only available on
Windows / Hyper-V Server 2016. Acceptable values::

    64, 128, 256, 512, 1024
"""),
    cfg.BoolOpt('use_multipath_io',
        default=False,
        help="""
Use multipath connections when attaching iSCSI or FC disks.

This requires the Multipath IO Windows feature to be enabled. MPIO
must be configured to claim such devices.
"""),
    cfg.ListOpt('iscsi_initiator_list',
        default=[],
        help="""
List of iSCSI initiators that will be used for establishing iSCSI
sessions.

If none are specified, the Microsoft iSCSI initiator service will
choose the initiator.
""")
]


def register_opts(conf):
    conf.register_group(hyperv_opt_group)
    conf.register_opts(hyperv_opts, group=hyperv_opt_group)


def list_opts():
    return {hyperv_opt_group: hyperv_opts}
\"\"\"), cfg.IntOpt('power_state_check_timeframe',", "in writing, software # distributed under the License is distributed on an \"AS", "the total RAM assigned to an instance and its startup RAM amount. For", "with Hyper-V, such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=\"\"\" Volume attach retry", "choose the initiator. \"\"\") ] def register_opts(conf): conf.register_group(hyperv_opt_group) conf.register_opts(hyperv_opts, group=hyperv_opt_group) def list_opts(): return", "sets the proper path for this config option. You can either give the", "default value. Possible values: * Name of the qemu-img executable, in case it", "amount. For example a ratio of 2.0 for an instance with 1024MB of", "leave this option to the default value. Possible values: * Name of the", "Reserved. # # Licensed under the Apache License, Version 2.0 (the \"License\"); you", "the Apache License, Version 2.0 (the \"License\"); you may # not use this", "used with other options that enable config drive usage with Hyper-V, such as", "which is then attached to the instance before it boots. The config drive", "the Microsoft iSCSI initiator service will choose the initiator. \"\"\") ] def register_opts(conf):", "which automatically sets the proper path for this config option. You can either", "FC disks. This requires the Multipath IO Windows feature to be enabled. MPIO", "Instance power state change event polling frequency. Sets the listener interval for power", "This options is meaningful when volume_attach_retry_count is greater than 1. * The retry", "the Hyper-V server role. The switch includes programmatically managed and extensible capabilities to", "used within an OpenStack deployment. \"\"\") hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic", "\"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable instance metrics collection Enables metrics collections for an", "state check timeframe The timeframe to be checked for instance power state changes.", "or private). Possible values: * If not provided, the first of a list", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the #", "Possible values: * If not provided, the first of a list of available", "instance creation in order to limit the CPU features used by the instance.", "for some of the image related operations like converting between different image types.", "* This option is meaningful when used with other options that enable config", "converting between different image types. You can get it from here: (http://qemu.weilnetz.de/) or", "OpenStack Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the proper path for this", "meaningful when the mounted_disk_query_retry_count is greater than 1. * The retry loop runs", "can get it from here: (http://qemu.weilnetz.de/) or you can install the Cloudbase OpenStack", "(the \"License\"); you may # not use this file except in compliance with", "Related options: * This option is meaningful when the mounted_disk_query_retry_count is greater than", "state of the instance from Hyper-V through the WMI interface, within the specified", "to the full path to an ``mkisofs.exe`` installation. Additionally, you must set the", "connect virtual machines to both virtual networks and the physical network. In addition,", "# # Unless required by applicable law or agreed to in writing, software", "(Default: 10). 
Related options: * Time interval between disk mount retries is declared", "\"\"\"), cfg.StrOpt('instances_path_share', default=\"\", help=\"\"\" Instances path share The name of a Windows share", "is declared with \"mounted_disk_query_retry_interval\" option. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted disk query", "Compute service to always create a configuration drive by setting the ``force_config_drive`` option", "password to config drive. When enabled, the admin password will be available from", "The name of a Windows share mapped to the \"instances_path\" dir and used", "it boots. The config drive can be attached as a disk drive (default)", "enhances the internal lifecycle notifications of instances that reboot themselves. It is unlikely", "* To use config drive with Hyper-V, you must set the ``mkisofs_cmd`` value", "the state of the instance from Hyper-V through the WMI interface, within the", "\"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10, min=0, help=\"\"\" Volume attach retry count The number of times", "``force_config_drive`` option set to ``True`` or when the REST API call to create", "levels. The vSwitch represented by this config option must be an external one", "startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable instance metrics collection Enables metrics collections for", "The retry loop runs with mounted_disk_query_retry_count and mounted_disk_query_retry_interval configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60,", "variable (Default). * Path of qemu-img command (DRIVELETTER:\\PATH\\TO\\QEMU-IMG\\COMMAND). Related options: * If the", "that enable config drive usage with Hyper-V, such as ``force_config_drive``. \"\"\"), cfg.IntOpt('volume_attach_retry_count', default=10,", "Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the proper path for this config option.", "The Hyper-V Virtual Switch is a software-based layer-2 Ethernet network switch that is", "values: * Name of the qemu-img executable, in case it is in the", "that will be used for estabilishing iSCSI sessions. If none are specified, the", "Enable instance metrics collection Enables metrics collections for an instance by using Hyper-V's", "programmatically managed and extensible capabilities to connect virtual machines to both virtual networks", "the Cloudbase OpenStack Hyper-V Compute Driver (https://cloudbase.it/openstack-hyperv-driver/) which automatically sets the proper path", "declared with \"mounted_disk_query_retry_interval\" option. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_interval', default=5, min=0, help=\"\"\" Mounted disk query retry", "and mounted_disk_query_retry_interval configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power state check timeframe", "\"\"\"), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0, help=\"\"\" Power state event polling interval Instance power state", "Hyper-V server role. The switch includes programmatically managed and extensible capabilities to connect", "the initiator. 
\"\"\") ] def register_opts(conf): conf.register_group(hyperv_opt_group) conf.register_opts(hyperv_opts, group=hyperv_opt_group) def list_opts(): return {hyperv_opt_group:", "command qemu-img is required for some of the image related operations like converting", "Wait soft reboot seconds Number of seconds to wait for instance to shut", "to the instance before it boots. The config drive can be attached as", "under the Apache License, Version 2.0 (the \"License\"); you may # not use", "in seconds (Default: 60). \"\"\"), cfg.BoolOpt('config_drive_cdrom', default=False, help=\"\"\" Mount config drive as a", "WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See", "drive image. * To use config drive with Hyper-V, you must set the", "The value expresses the ratio between the total RAM assigned to an instance", "and extensible capabilities to connect virtual machines to both virtual networks and the", "The timeframe to be checked for instance power state changes. This option is", "Possible values: * Timeframe in seconds (Default: 60). \"\"\"), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0, help=\"\"\"", "provided, the first of a list of available vswitches is used. This list", "``mkisofs_cmd`` value to the full path to an ``mkisofs.exe`` installation. Additionally, you must", "the installation of the Hyper-V server role. The switch includes programmatically managed and", "qemu-img will be used to convert the ISO to a VHD, otherwise the", "\"\": An administrative share will be used (Default). * Name of a Windows", "estabilishing iSCSI sessions. If none are specified, the Microsoft iSCSI initiator service will", "blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features This flag is needed to", "min=0, help=\"\"\" Mounted disk query retry count The number of times to retry", "Hyper-V Server 2012 R2 or newer and RDS-Virtualization feature has to be enabled.", "title='The Hyper-V feature', help=\"\"\" The hyperv feature allows you to configure the Hyper-V", "to the given value. This option enhances the internal lifecycle notifications of instances", "share mapped to the \"instances_path\" dir and used by the resize feature to", "can be retrieved by other apps and services, e.g.: Ceilometer. \"\"\"), cfg.StrOpt('instances_path_share', default=\"\",", "interface, within the specified timeframe. Possible values: * Timeframe in seconds (Default: 60).", "an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either", "flag. * ``config_drive_format`` option must be set to ``iso9660`` in order to use", "Windows share. Related options: * \"instances_path\": The directory which will be used if", "cfg.IntOpt('wait_soft_reboot_seconds', default=60, min=0, help=\"\"\" Wait soft reboot seconds Number of seconds to wait", "Possible values: * Positive integer values (Default: 10). Related options: * Time interval", "set the ``qemu_img_cmd`` value to the full path to an ``qemu-img`` command installation.", "be found or the retry count is reached. Possible values: * Positive integer", "an external one (not internal or private). Possible values: * If not provided,", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "meaningful when volume_attach_retry_count is greater than 1. * The retry loop runs with", "for the same \"instances_path\" used locally. 
Possible values: * \"\": An administrative share", "\"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\" Enable RemoteFX feature This requires at least one DirectX", "features used by the instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0, help=\"\"\" Mounted disk query", "Switch is a software-based layer-2 Ethernet network switch that is available with the", "8] - Windows / Hyper-V Server 2016 **os:vram**. Guest VM VRAM amount. Only", "or the given retry count is reached. Possible values: * Positive integer values", "* You can configure the Compute service to always create a configuration drive", "remain an ISO. To use config drive with Hyper-V, you must set the", "256, 512, 1024 \"\"\"), cfg.BoolOpt('use_multipath_io', default=False, help=\"\"\" Use multipath connections when attaching iSCSI", "default=5, min=0, help=\"\"\" Volume attach retry interval Interval between volume attachment attempts, in", "False, qemu-img will be used to convert the ISO to a VHD, otherwise", "needed to support live migration to hosts with different CPU features and checked", "to ``iso9660`` in order to use CD drive as the config drive image.", "when set to a value greater than 1. The value expresses the ratio", "Positive integer values (Default: 10). Related options: * Time interval between attachment attempts", "volume_attach_retry_interval configuration options. \"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\" Enable RemoteFX feature This requires at", "directory which will be used if this option here is left blank. \"\"\"),", "virtual machines to both virtual networks and the physical network. In addition, Hyper-V", "Server 2016. Acceptable values:: 64, 128, 256, 512, 1024 \"\"\"), cfg.BoolOpt('use_multipath_io', default=False, help=\"\"\"", "seconds. Possible values: * Time in seconds (Default: 5). Related options: * This", "this option here is left blank. \"\"\"), cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features", "If the config_drive_cdrom option is False, qemu-img will be used to convert the", "configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power state check timeframe The timeframe", "RAM allocated at startup. Possible values: * 1.0: Disables dynamic memory allocation (Default).", "checks for a mounted disk, in seconds. Possible values: * Time in seconds", "cfg.BoolOpt('limit_cpu_features', default=False, help=\"\"\" Limit CPU features This flag is needed to support live", "mounted disk, in seconds. Possible values: * Time in seconds (Default: 5). Related", "1 is recommended (Default: 10). Related options: * Time interval between disk mount", "1.0: Enables allocation of total implied RAM divided by this value for startup.", "resolution size. Acceptable values:: 1024x768, 1280x1024, 1600x1200, 1920x1200, 2560x1600, 3840x2160 ``3840x2160`` is only", "graphics adapter for Windows / Hyper-V Server 2012 R2 or newer and RDS-Virtualization", "set to ``iso9660`` in order to use CD drive as the config drive", "example a ratio of 2.0 for an instance with 1024MB of RAM implies", "Interval between volume attachment attempts, in seconds. Possible values: * Time in seconds", "for instance to shut down after soft reboot request is made. We fall", "specific language governing permissions and limitations # under the License. 
from oslo_config import", "an administrative share (hidden network share) will be used, looking for the same", "at least one DirectX 11 capable graphics adapter for Windows / Hyper-V Server", "for the specific language governing permissions and limitations # under the License. from", "REST API call to create an instance will have ``--config-drive=True`` flag. * ``config_drive_format``", "initiators that will be used for estabilishing iSCSI sessions. If none are specified,", "``qemu-img`` command installation. * You can configure the Compute service to always create", "a VHD, otherwise the config drive will remain an ISO. To use config", "a configuration drive by setting the ``force_config_drive`` option to ``True``. \"\"\"), cfg.BoolOpt('config_drive_inject_password', default=False,", "values: * 1.0: Disables dynamic memory allocation (Default). * Float values greater than", "hard reboot if instance does not shutdown within this window. Possible values: *", "``qemu_img_cmd`` value to the full path to an ``qemu-img`` command installation. * You", "cfg.StrOpt('vswitch_name', help=\"\"\" External virtual switch name The Hyper-V Virtual Switch is a software-based", "for startup. \"\"\"), cfg.BoolOpt('enable_instance_metrics_collection', default=False, help=\"\"\" Enable instance metrics collection Enables metrics collections", "to limit the CPU features used by the instance. \"\"\"), cfg.IntOpt('mounted_disk_query_retry_count', default=10, min=0,", "mounted_disk_query_retry_interval configuration options. \"\"\"), cfg.IntOpt('power_state_check_timeframe', default=60, min=0, help=\"\"\" Power state check timeframe The", "RemoteFX can be requested with the following flavor extra specs: **os:resolution**. Guest VM", "instance will have ``--config-drive=True`` flag. * ``config_drive_format`` option must be set to ``iso9660``", "the ratio between the total RAM assigned to an instance and its startup", "the retry count is reached. Possible values: * Positive integer values. Values greater", "mapped to the \"instances_path\" dir and used by the resize feature to copy", "security, isolation, and service levels. The vSwitch represented by this config option must", "for instance power state changes. This option is used to fetch the state", "changes. This option is used to fetch the state of the instance from", "volume attachment attempts, in seconds. Possible values: * Time in seconds (Default: 5).", "min=0, help=\"\"\" Mounted disk query retry interval Interval between checks for a mounted", "60). \"\"\"), cfg.IntOpt('power_state_event_polling_interval', default=2, min=0, help=\"\"\" Power state event polling interval Instance power", "driver to be used within an OpenStack deployment. \"\"\") hyperv_opts = [ cfg.FloatOpt('dynamic_memory_ratio',", "512MB of RAM allocated at startup. Possible values: * 1.0: Disables dynamic memory", "fall back to hard reboot if instance does not shutdown within this window.", "cfg.FloatOpt('dynamic_memory_ratio', default=1.0, help=\"\"\" Dynamic memory ratio Enables dynamic memory allocation (ballooning) when set", "layer-2 Ethernet network switch that is available with the installation of the Hyper-V", "retry loop runs with volume_attach_retry_count and volume_attach_retry_interval configuration options. \"\"\"), cfg.BoolOpt('enable_remotefx', default=False, help=\"\"\"", "be requested with the following flavor extra specs: **os:resolution**. Guest VM screen resolution", "instances that reboot themselves. It is unlikely that an operator has to change" ]
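The group and option definitions above only take effect once they are registered on a ConfigOpts object, which is what register_opts() does. A minimal sketch of how a consumer might register and read these options, assuming the module above is importable as hyperv (the module name and the sample attribute accesses are illustrative, not part of the original file):

from oslo_config import cfg

import hyperv  # assumed module name for the option definitions above

CONF = cfg.CONF
hyperv.register_opts(CONF)   # registers the [hyperv] group and its options
CONF([])                     # parse an empty argv so the defaults apply

# Registered options become reachable under the group name.
print(CONF.hyperv.dynamic_memory_ratio)   # -> 1.0
print(CONF.hyperv.qemu_img_cmd)           # -> 'qemu-img.exe'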
import requests

# Download a French word list and keep only the words longer than 4 characters.
words_list = requests.get("https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt").text
words_list = filter(lambda x: len(x) > 4, words_list.split('\n'))

# "Chemin d'écriture ?" asks for the output path; an empty answer falls back to ./words.txt.
path = input("Chemin d'écriture ? (words.txt) ")
if path == "":
    path = "./words.txt"

with open(path, "w", encoding="utf-8") as file:
    file.write('\n'.join(words_list))
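Purely as an illustration (the dump does not show how the generated file is consumed), the word list written above can be read back like this:

# Illustrative only: load the word list written by the script above.
with open("./words.txt", encoding="utf-8") as file:
    words = [line.strip() for line in file if line.strip()]
print(len(words), "words kept (all longer than 4 characters)")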
import unittest
from unittest import mock
import os
import subprocess

from testfixtures import TempDirectory

from simplegallery.upload.uploader_factory import get_uploader


class AWSUploaderTestCase(unittest.TestCase):

    def test_no_location(self):
        uploader = get_uploader('aws')
        self.assertFalse(uploader.check_location(''))

    @mock.patch('subprocess.run')
    def test_upload_gallery(self, subprocess_run):
        subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0)

        with TempDirectory() as tempdir:
            # Setup mock file and uploader
            tempdir.write('index.html', b'')
            gallery_path = os.path.join(tempdir.path, 'index.html')
            uploader = get_uploader('aws')

            # Test upload to bucket
            uploader.upload_gallery('s3://testbucket/path/', gallery_path)
            subprocess_run.assert_called_with(
                ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])

            # Test upload to bucket without prefix
            uploader.upload_gallery('testbucket/path/', gallery_path)
            subprocess_run.assert_called_with(
                ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])

            # Test upload to bucket without trailing /
            uploader.upload_gallery('s3://testbucket/path', gallery_path)
            subprocess_run.assert_called_with(
                ['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])


if __name__ == '__main__':
    unittest.main()
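The test above pins down the expected behaviour of the AWS uploader: an empty location fails check_location(), and upload_gallery() normalizes the bucket location (adds the s3:// prefix and a trailing slash) before shelling out to the AWS CLI. A minimal sketch of an uploader that would satisfy those assertions (not the actual simplegallery implementation, just what the test implies):

import subprocess


class SketchAWSUploader:
    """Illustrative uploader satisfying the assertions in the test above."""

    def check_location(self, location):
        # The test only asserts that an empty location is rejected.
        return bool(location)

    def upload_gallery(self, location, gallery_path):
        # Normalize the location the way the expected CLI call implies.
        if not location.startswith('s3://'):
            location = 's3://' + location
        if not location.endswith('/'):
            location = location + '/'
        subprocess.run(['aws', 's3', 'sync', gallery_path, location,
                        '--exclude', '.DS_Store'])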
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file 'add_subject.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!

from PyQt4 import QtCore, QtGui

try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)


class Ui_Dialog_add_subject(object):
    def setupUi(self, Dialog_add_subject):
        Dialog_add_subject.setObjectName(_fromUtf8("Dialog_add_subject"))
        Dialog_add_subject.resize(568, 374)
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(10)
        Dialog_add_subject.setFont(font)
        Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/4zIr6y.jpg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        Dialog_add_subject.setWindowIcon(icon)
        self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject)
        self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(12)
        self.lbl_subject_name.setFont(font)
        self.lbl_subject_name.setObjectName(_fromUtf8("lbl_subject_name"))
        self.label_add_subject = QtGui.QLabel(Dialog_add_subject)
        self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(14)
        font.setBold(True)
        font.setWeight(75)
        self.label_add_subject.setFont(font)
        self.label_add_subject.setObjectName(_fromUtf8("label_add_subject"))
        self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject)
        self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(12)
        self.lineEdit_subject_name.setFont(font)
        self.lineEdit_subject_name.setObjectName(_fromUtf8("lineEdit_subject_name"))
        self.label_year = QtGui.QLabel(Dialog_add_subject)
        self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(12)
        self.label_year.setFont(font)
        self.label_year.setObjectName(_fromUtf8("label_year"))
        self.label_semester = QtGui.QLabel(Dialog_add_subject)
        self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(12)
        self.label_semester.setFont(font)
        self.label_semester.setObjectName(_fromUtf8("label_semester"))
        self.pushButton_save = QtGui.QPushButton(Dialog_add_subject)
        self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(10)
        self.pushButton_save.setFont(font)
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/Save-as.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_save.setIcon(icon1)
        self.pushButton_save.setIconSize(QtCore.QSize(20, 20))
        self.pushButton_save.setObjectName(_fromUtf8("pushButton_save"))
        self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject)
        self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        self.pushButton_cancel.setFont(font)
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(_fromUtf8("Qt_interface/SE_syllabus/if_draw-08_725558.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_cancel.setIcon(icon2)
        self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20))
        self.pushButton_cancel.setObjectName(_fromUtf8("pushButton_cancel"))
        self.comboBox_year = QtGui.QComboBox(Dialog_add_subject)
        self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(12)
        self.comboBox_year.setFont(font)
        self.comboBox_year.setObjectName(_fromUtf8("comboBox_year"))
        self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject)
        self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31))
        font = QtGui.QFont()
        font.setFamily(_fromUtf8("Times New Roman"))
        font.setPointSize(12)
        self.comboBox_semester.setFont(font)
        self.comboBox_semester.setObjectName(_fromUtf8("comboBox_semester"))

        self.retranslateUi(Dialog_add_subject)
        QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8("clicked()")), self.lineEdit_subject_name.clear)
        QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject)

    def retranslateUi(self, Dialog_add_subject):
        Dialog_add_subject.setWindowTitle(_translate("Dialog_add_subject", "Dialog", None))
        self.lbl_subject_name.setText(_translate("Dialog_add_subject", "SUBJECT NAME", None))
        self.label_add_subject.setText(_translate("Dialog_add_subject", "ADD SUBJECT", None))
        self.label_year.setText(_translate("Dialog_add_subject", "YEAR", None))
        self.label_semester.setText(_translate("Dialog_add_subject", "SEMESTER", None))
        self.pushButton_save.setText(_translate("Dialog_add_subject", "SAVE", None))
        self.pushButton_cancel.setText(_translate("Dialog_add_subject", "CANCEL", None))


if __name__ == "__main__":
    import sys
    app = QtGui.QApplication(sys.argv)
    Dialog_add_subject = QtGui.QDialog()
    ui = Ui_Dialog_add_subject()
    ui.setupUi(Dialog_add_subject)
    Dialog_add_subject.show()
    sys.exit(app.exec_())
All changes made in", "# Form implementation generated from reading ui file 'add_subject.ui' # # Created by:", "_encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object):", "\"__main__\": import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui = Ui_Dialog_add_subject() ui.setupUi(Dialog_add_subject)", "QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font)", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190,", "self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\"))", "icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject)", "return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text,", "file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8", "disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self,", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31))", "coding: utf-8 -*- # Form implementation generated from reading ui file 'add_subject.ui' #", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off)", "91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester", "-*- # Form implementation generated from reading ui file 'add_subject.ui' # # Created", "4.11.4 # # WARNING! 
All changes made in this file will be lost!", "self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year =", "111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel,", "self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s", "QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font)", "NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\",", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290,", "font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont()", "New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20))", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self,", "_fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context,", "self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\"))", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = 
QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151,", "self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75)", "QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont()", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject)", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190,", "this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 =", "91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save =", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal,", "20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font =", "= QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340,", "Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font =", "Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50,", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) 
self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30,", "self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235,", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31))", "def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context,", "QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font)", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")),", "Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\"))", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name =", "\"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__", "try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding =", "self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 =", "self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = 
QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError:", "made in this file will be lost! from PyQt4 import QtCore, QtGui try:", "= QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81,", "PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return", "utf-8 -*- # Form implementation generated from reading ui file 'add_subject.ui' # #", "# -*- coding: utf-8 -*- # Form implementation generated from reading ui file", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20,", "text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont()", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220,", "21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject)", "= QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190,", "None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ == \"__main__\": import sys app = QtGui.QApplication(sys.argv)", "111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester =", "# # WARNING! All changes made in this file will be lost! 
from", "= QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10)", "self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20,", "try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding)", "Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon", "font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont()", "21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject)", "self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\"))", "font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont()", "374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon()", "self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font =", "self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\"))", "generator 4.11.4 # # WARNING! 
All changes made in this file will be", "self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31))", "160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject)", "\"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None))", "s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig,", "QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding", "All changes made in this file will be lost! from PyQt4 import QtCore,", "None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ == \"__main__\": import sys", "QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font", "# Created by: PyQt4 UI code generator 4.11.4 # # WARNING! All changes", "_fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8", "self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font =", "self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if", "None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\",", "lost! 
from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def", "Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font =", "PyQt4 UI code generator 4.11.4 # # WARNING! All changes made in this", "disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class", "self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31)) font = QtGui.QFont()", "QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2", "Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\"))", "QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31))", "font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont()", "Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "== \"__main__\": import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui = Ui_Dialog_add_subject()", "self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "# # Created by: PyQt4 UI code generator 4.11.4 # # WARNING! 
All", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230,", "_translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\"))", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject)", "Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name =", "font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont()", "New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2)", "QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\")) self.comboBox_year = QtGui.QComboBox(Dialog_add_subject) self.comboBox_year.setGeometry(QtCore.QRect(190, 91, 111, 31))", "code generator 4.11.4 # # WARNING! 
All changes made in this file will", "95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester", "icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111,", "30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font)", "QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font", "Created by: PyQt4 UI code generator 4.11.4 # # WARNING! All changes made", "self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ == \"__main__\":", "self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\"))", "Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\",", "290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 =", "UI code generator 4.11.4 # # WARNING! 
All changes made in this file", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off)", "from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s):", "= QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14)", "QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\",", "QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font)", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject)", "111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")),", "81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester =", "def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10)", "text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568,", "self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\",", "implementation generated from reading ui file 'add_subject.ui' # # Created by: PyQt4 UI", "= QtGui.QApplication.UnicodeUTF8 def _translate(context, text, 
disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError:", "self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try:", "font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\",", "235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject", "icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1) self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject)", "font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font", "self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\",", "165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save", "disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return", "New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321,", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95,", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50,", "20)) 
self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times", "Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\",", "= QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None)) self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None))", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear)", "self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New", "New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_cancel.setIcon(icon2) self.pushButton_cancel.setIconSize(QtCore.QSize(20, 20)) self.pushButton_cancel.setObjectName(_fromUtf8(\"pushButton_cancel\"))", "return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context,", "Form implementation generated from reading ui file 'add_subject.ui' # # Created by: PyQt4", "def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return", "self.pushButton_save.setIconSize(QtCore.QSize(20, 20)) self.pushButton_save.setObjectName(_fromUtf8(\"pushButton_save\")) self.pushButton_cancel = QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont()", "font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon)", "Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font =", 
"QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context,", "New Roman\")) font.setPointSize(12) self.comboBox_semester.setFont(font) self.comboBox_semester.setObjectName(_fromUtf8(\"comboBox_semester\")) self.retranslateUi(Dialog_add_subject) QtCore.QObject.connect(self.pushButton_cancel, QtCore.SIGNAL(_fromUtf8(\"clicked()\")), self.lineEdit_subject_name.clear) QtCore.QMetaObject.connectSlotsByName(Dialog_add_subject) def retranslateUi(self, Dialog_add_subject):", "sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui = Ui_Dialog_add_subject() ui.setupUi(Dialog_add_subject) Dialog_add_subject.show() sys.exit(app.exec_())", "font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font =", "icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name = QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131,", "self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1", "QtGui.QLabel(Dialog_add_subject) self.lbl_subject_name.setGeometry(QtCore.QRect(50, 235, 131, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font)", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) self.pushButton_save.setIcon(icon1)", "= QtGui.QPushButton(Dialog_add_subject) self.pushButton_cancel.setGeometry(QtCore.QRect(340, 290, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font)", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject)", "\"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None)) self.label_year.setText(_translate(\"Dialog_add_subject\", \"YEAR\", None))", "Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\")) self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31))", "in this file will be lost! 
from PyQt4 import QtCore, QtGui try: _fromUtf8", "= QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160,", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) self.pushButton_cancel.setFont(font) icon2 = QtGui.QIcon() icon2.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/if_draw-08_725558.png\")), QtGui.QIcon.Normal,", "Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font =", "New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon = QtGui.QIcon() icon.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/4zIr6y.jpg\")), QtGui.QIcon.Normal, QtGui.QIcon.Off) Dialog_add_subject.setWindowIcon(icon) self.lbl_subject_name", "QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True)", "New Roman\")) font.setPointSize(12) self.label_year.setFont(font) self.label_year.setObjectName(_fromUtf8(\"label_year\")) self.label_semester = QtGui.QLabel(Dialog_add_subject) self.label_semester.setGeometry(QtCore.QRect(50, 165, 91, 21)) font", "31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) self.pushButton_save.setFont(font) icon1 = QtGui.QIcon() icon1.addPixmap(QtGui.QPixmap(_fromUtf8(\"Qt_interface/SE_syllabus/Save-as.png\")),", "= QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "Dialog_add_subject.resize(568, 374) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(10) Dialog_add_subject.setFont(font) Dialog_add_subject.setContextMenuPolicy(QtCore.Qt.CustomContextMenu) icon =", "if __name__ == \"__main__\": import sys app = QtGui.QApplication(sys.argv) Dialog_add_subject = QtGui.QDialog() ui", "151, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(14) font.setBold(True) font.setWeight(75) self.label_add_subject.setFont(font) self.label_add_subject.setObjectName(_fromUtf8(\"label_add_subject\"))", "self.lineEdit_subject_name = QtGui.QLineEdit(Dialog_add_subject) self.lineEdit_subject_name.setGeometry(QtCore.QRect(190, 230, 321, 31)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\"))", "_translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text,", "QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def", "New Roman\")) font.setPointSize(12) self.comboBox_year.setFont(font) self.comboBox_year.setObjectName(_fromUtf8(\"comboBox_year\")) self.comboBox_semester = QtGui.QComboBox(Dialog_add_subject) self.comboBox_semester.setGeometry(QtCore.QRect(190, 160, 111, 31)) font", "None)) 
self.label_semester.setText(_translate(\"Dialog_add_subject\", \"SEMESTER\", None)) self.pushButton_save.setText(_translate(\"Dialog_add_subject\", \"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ ==", "disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_Dialog_add_subject(object): def setupUi(self, Dialog_add_subject): Dialog_add_subject.setObjectName(_fromUtf8(\"Dialog_add_subject\")) Dialog_add_subject.resize(568, 374)", "will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except", "= QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50, 95, 81, 21)) font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12)", "font = QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.lineEdit_subject_name.setFont(font) self.lineEdit_subject_name.setObjectName(_fromUtf8(\"lineEdit_subject_name\")) self.label_year = QtGui.QLabel(Dialog_add_subject) self.label_year.setGeometry(QtCore.QRect(50,", "New Roman\")) font.setPointSize(12) self.lbl_subject_name.setFont(font) self.lbl_subject_name.setObjectName(_fromUtf8(\"lbl_subject_name\")) self.label_add_subject = QtGui.QLabel(Dialog_add_subject) self.label_add_subject.setGeometry(QtCore.QRect(220, 30, 151, 31)) font", "retranslateUi(self, Dialog_add_subject): Dialog_add_subject.setWindowTitle(_translate(\"Dialog_add_subject\", \"Dialog\", None)) self.lbl_subject_name.setText(_translate(\"Dialog_add_subject\", \"SUBJECT NAME\", None)) self.label_add_subject.setText(_translate(\"Dialog_add_subject\", \"ADD SUBJECT\", None))", "WARNING! All changes made in this file will be lost! from PyQt4 import", "\"SAVE\", None)) self.pushButton_cancel.setText(_translate(\"Dialog_add_subject\", \"CANCEL\", None)) if __name__ == \"__main__\": import sys app =", "QtGui.QFont() font.setFamily(_fromUtf8(\"Times New Roman\")) font.setPointSize(12) self.label_semester.setFont(font) self.label_semester.setObjectName(_fromUtf8(\"label_semester\")) self.pushButton_save = QtGui.QPushButton(Dialog_add_subject) self.pushButton_save.setGeometry(QtCore.QRect(190, 290, 111," ]
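In practice a generated module like the one above is imported rather than edited or run directly. The following is a minimal sketch, under assumptions, of how an application might wrap Ui_Dialog_add_subject in its own dialog class: the module name add_subject, the wrapper class AddSubjectDialog, and the on_save handler are illustrative assumptions, while the widget names (pushButton_save, lineEdit_subject_name, comboBox_year, comboBox_semester) come from setupUi above.

import sys
from PyQt4 import QtGui

# Assumption: the generated code above lives in a module named add_subject.
from add_subject import Ui_Dialog_add_subject


class AddSubjectDialog(QtGui.QDialog):
    """Hypothetical wrapper around the generated UI class."""

    def __init__(self, parent=None):
        super(AddSubjectDialog, self).__init__(parent)
        self.ui = Ui_Dialog_add_subject()
        self.ui.setupUi(self)
        # New-style signal connection; the generated code already wires the
        # CANCEL button to clear the subject-name line edit.
        self.ui.pushButton_save.clicked.connect(self.on_save)

    def on_save(self):
        # Read what the user entered; what is done with it is application-specific.
        subject = self.ui.lineEdit_subject_name.text()
        year = self.ui.comboBox_year.currentText()
        semester = self.ui.comboBox_semester.currentText()
        print(subject, year, semester)
        self.accept()


if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    dialog = AddSubjectDialog()
    dialog.show()
    sys.exit(app.exec_())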
[ "receiver for handle the fact that test runner calls syncdb for several databases", "We connect receiver here and not in unit test code because we need", "That is, sequence of # actions would be: # # 1. Test runner", "['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE =", "# actions would be: # # 1. Test runner imports this module. #", "2. We connect receiver. # 3. Test runner calls syncdb for create default", "== SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args = kwargs # we need", "several times for some of them. \"\"\" def __init__(self): self.call_counter = 0 self.call_args", "class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models)", "= PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args", "for several databases and several times for some of them. \"\"\" def __init__(self):", "= self.call_counter + 1 self.call_args = kwargs # we need to test only", "receiver. # 3. Test runner calls syncdb for create default database. # 4.", "= False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args = None def", "'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object):", "kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args = kwargs # we", "here and not in unit test code because we need to # connect", "this module. # 2. We connect receiver. # 3. Test runner calls syncdb", "We connect receiver. # 3. Test runner calls syncdb for create default database.", "# connect receiver before test runner creates database. That is, sequence of #", "SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args = None", "# 4. Test runner execute our unit test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver,", "# 1. Test runner imports this module. # 2. We connect receiver. #", "because we need to # connect receiver before test runner creates database. That", "Special receiver for handle the fact that test runner calls syncdb for several", "False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args = None def __call__(self,", "django.db.models import signals from django.test import TestCase from django.core import management from django.utils", "'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter =", "Test runner imports this module. # 2. We connect receiver. # 3. Test", "calls syncdb for create default database. # 4. 
Test runner execute our unit", "= 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter", "'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def", "1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle the fact", "sender, **kwargs): # Although test runner calls syncdb for several databases, # testing", "the fact that test runner calls syncdb for several databases and several times", "quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args =", "for several databases, # testing for only one of them is quite sufficient.", "= 0 self.call_args = None def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter", "self.call_counter = self.call_counter + 1 self.call_args = kwargs # we need to test", "django.core import management from django.utils import six from shared_models import models PRE_SYNCDB_ARGS =", "management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS))", "def __call__(self, signal, sender, **kwargs): # Although test runner calls syncdb for several", "several databases, # testing for only one of them is quite sufficient. if", "runner imports this module. # 2. We connect receiver. # 3. Test runner", "unit test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter,", "sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r,", "def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE,", "testing for only one of them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE:", "database. # 4. Test runner execute our unit test code. pre_syncdb_receiver = OneTimeReceiver()", "= self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver for", "of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and not in unit", "'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class", "for create default database. # 4. Test runner execute our unit test code.", "= None def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args", "= OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r", "__call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args = kwargs class", "SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args = kwargs # we need to", "4. 
Test runner execute our unit test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models)", "Although test runner calls syncdb for several databases, # testing for only one", "r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY) self.assertEqual(args['interactive'], SYNCDB_INTERACTIVE) self.assertEqual(args['db'], 'default')", "def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO())", "connect receiver. # 3. Test runner calls syncdb for create default database. #", "syncdb for several databases, # testing for only one of them is quite", "connect receiver here and not in unit test code because we need to", "= None def __call__(self, signal, sender, **kwargs): # Although test runner calls syncdb", "for handle the fact that test runner calls syncdb for several databases and", "self.call_args = kwargs # we need to test only one call of syncdb", "PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1", "= kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle the fact that test", "signal, sender, **kwargs): # Although test runner calls syncdb for several databases, #", "\"\"\" def __init__(self): self.call_counter = 0 self.call_args = None def __call__(self, signal, sender,", "databases, # testing for only one of them is quite sufficient. if kwargs['db']", "only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and", "signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and not in unit test code", "signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver()", "# Although test runner calls syncdb for several databases, # testing for only", "SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0", "Test runner execute our unit test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class", "kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle the fact that test runner", "test runner calls syncdb for several databases, # testing for only one of", "would be: # # 1. Test runner imports this module. # 2. We", "self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE,", "handle the fact that test runner calls syncdb for several databases and several", "__init__(self): self.call_counter = 0 self.call_args = None def __call__(self, signal, sender, **kwargs): self.call_counter", "= 0 self.call_args = None def __call__(self, signal, sender, **kwargs): # Although test", "is, sequence of # actions would be: # # 1. Test runner imports", "some of them. 
\"\"\" def __init__(self): self.call_counter = 0 self.call_args = None def", "runner calls syncdb for several databases, # testing for only one of them", "of # actions would be: # # 1. Test runner imports this module.", "our unit test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self):", "# testing for only one of them is quite sufficient. if kwargs['db'] ==", "__call__(self, signal, sender, **kwargs): # Although test runner calls syncdb for several databases,", "Test runner calls syncdb for create default database. # 4. Test runner execute", "be: # # 1. Test runner imports this module. # 2. We connect", "from django.test import TestCase from django.core import management from django.utils import six from", "that test runner calls syncdb for several databases and several times for some", "several databases and several times for some of them. \"\"\" def __init__(self): self.call_counter", "sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args),", "test runner calls syncdb for several databases and several times for some of", "self.call_counter = 0 self.call_args = None def __call__(self, signal, sender, **kwargs): self.call_counter =", "django.utils import six from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive',", "TestCase from django.core import management from django.utils import six from shared_models import models", "self.call_counter + 1 self.call_args = kwargs # we need to test only one", "test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here", "runner calls syncdb for create default database. # 4. Test runner execute our", "# we need to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) #", "<gh_stars>1-10 from django.db.models import signals from django.test import TestCase from django.core import management", "sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\"", "self.call_args = None def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter + 1", "if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args = kwargs #", "sender=models) # We connect receiver here and not in unit test code because", "test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1)", "3. Test runner calls syncdb for create default database. # 4. Test runner", "unit test code because we need to # connect receiver before test runner", "def __init__(self): self.call_counter = 0 self.call_args = None def __call__(self, signal, sender, **kwargs):", "# 3. Test runner calls syncdb for create default database. # 4. Test", "execute our unit test code. 
pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def", "from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE =", "databases and several times for some of them. \"\"\" def __init__(self): self.call_counter =", "1 self.call_args = kwargs # we need to test only one call of", "runner execute our unit test code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase):", "signal, sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object):", "def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args = kwargs", "PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter,", "verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models)", "+ 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle the", "of them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter +", "self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle the fact that", "one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and not", "signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1)", "them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1", "from django.utils import six from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity',", "test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY,", "receiver here and not in unit test code because we need to #", "to # connect receiver before test runner creates database. That is, sequence of", "OneTimeReceiver(object): \"\"\" Special receiver for handle the fact that test runner calls syncdb", "management from django.utils import six from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models',", "0 self.call_args = None def __call__(self, signal, sender, **kwargs): # Although test runner", "need to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect", "__init__(self): self.call_counter = 0 self.call_args = None def __call__(self, signal, sender, **kwargs): #", "SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self):", "them. 
\"\"\" def __init__(self): self.call_counter = 0 self.call_args = None def __call__(self, signal,", "self.call_counter = 0 self.call_args = None def __call__(self, signal, sender, **kwargs): # Although", "for only one of them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter", "+ 1 self.call_args = kwargs # we need to test only one call", "create default database. # 4. Test runner execute our unit test code. pre_syncdb_receiver", "database. That is, sequence of # actions would be: # # 1. Test", "syncdb for create default database. # 4. Test runner execute our unit test", "database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'],", "1. Test runner imports this module. # 2. We connect receiver. # 3.", "'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE = False", "one of them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter", "0 self.call_args = None def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter +", "= ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY = 1 SYNCDB_INTERACTIVE", "pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self):", "in unit test code because we need to # connect receiver before test", "syncdb for several databases and several times for some of them. \"\"\" def", "args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY) self.assertEqual(args['interactive'], SYNCDB_INTERACTIVE)", "syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and not in unit test", "need to # connect receiver before test runner creates database. That is, sequence", "test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args", "sequence of # actions would be: # # 1. 
Test runner imports this", "signals from django.test import TestCase from django.core import management from django.utils import six", "= kwargs # we need to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver,", "PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args = None def __call__(self, signal, sender,", "import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY", "self.call_args = None def __call__(self, signal, sender, **kwargs): # Although test runner calls", "from django.core import management from django.utils import six from shared_models import models PRE_SYNCDB_ARGS", "**kwargs): # Although test runner calls syncdb for several databases, # testing for", "# We connect receiver here and not in unit test code because we", "we need to # connect receiver before test runner creates database. That is,", "# 2. We connect receiver. # 3. Test runner calls syncdb for create", "import management from django.utils import six from shared_models import models PRE_SYNCDB_ARGS = ['app',", "and not in unit test code because we need to # connect receiver", "kwargs # we need to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models)", "r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args =", "is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args", "import TestCase from django.core import management from django.utils import six from shared_models import", "module. # 2. We connect receiver. # 3. Test runner calls syncdb for", "class OneTimeReceiver(object): \"\"\" Special receiver for handle the fact that test runner calls", "interactive=SYNCDB_INTERACTIVE, load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'],", "times for some of them. \"\"\" def __init__(self): self.call_counter = 0 self.call_args =", "fact that test runner calls syncdb for several databases and several times for", "calls syncdb for several databases, # testing for only one of them is", "not in unit test code because we need to # connect receiver before", "test code because we need to # connect receiver before test runner creates", "and several times for some of them. \"\"\" def __init__(self): self.call_counter = 0", "to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver", "None def __call__(self, signal, sender, **kwargs): self.call_counter = self.call_counter + 1 self.call_args =", "six from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE", "for some of them. \"\"\" def __init__(self): self.call_counter = 0 self.call_args = None", "imports this module. # 2. We connect receiver. # 3. Test runner calls", "calls syncdb for several databases and several times for some of them. \"\"\"", "= 1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args", "connect receiver before test runner creates database. 
That is, sequence of # actions", "actions would be: # # 1. Test runner imports this module. # 2.", "default database. # 4. Test runner execute our unit test code. pre_syncdb_receiver =", "self.call_counter = self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver", "1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb', database=SYNCDB_DATABASE, verbosity=SYNCDB_VERBOSITY, interactive=SYNCDB_INTERACTIVE, load_initial_data=False,", "OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r =", "receiver before test runner creates database. That is, sequence of # actions would", "test runner creates database. That is, sequence of # actions would be: #", "\"\"\" Special receiver for handle the fact that test runner calls syncdb for", "only one of them is quite sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter =", "# # 1. Test runner imports this module. # 2. We connect receiver.", "None def __call__(self, signal, sender, **kwargs): # Although test runner calls syncdb for", "stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY) self.assertEqual(args['interactive'],", "django.test import TestCase from django.core import management from django.utils import six from shared_models", "sufficient. if kwargs['db'] == SYNCDB_DATABASE: self.call_counter = self.call_counter + 1 self.call_args = kwargs", "self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special receiver for handle", "shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default'", "SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def test_pre_syncdb_args(self): r = PreSyncdbReceiver() signals.pre_syncdb.connect(r, sender=models) management.call_command('syncdb',", "from django.db.models import signals from django.test import TestCase from django.core import management from", "import six from shared_models import models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db']", "code because we need to # connect receiver before test runner creates database.", "runner calls syncdb for several databases and several times for some of them.", "runner creates database. That is, sequence of # actions would be: # #", "code. pre_syncdb_receiver = OneTimeReceiver() signals.pre_syncdb.connect(pre_syncdb_receiver, sender=models) class SyncdbSignalTests(TestCase): def test_pre_syncdb_call_time(self): self.assertEqual(pre_syncdb_receiver.call_counter, 1) def", "creates database. That is, sequence of # actions would be: # # 1.", "= r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY) self.assertEqual(args['interactive'], SYNCDB_INTERACTIVE) self.assertEqual(args['db'],", "1 SYNCDB_INTERACTIVE = False class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args =", "before test runner creates database. 
That is, sequence of # actions would be:", "call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We connect receiver here and not in", "models PRE_SYNCDB_ARGS = ['app', 'create_models', 'verbosity', 'interactive', 'db'] SYNCDB_DATABASE = 'default' SYNCDB_VERBOSITY =", "we need to test only one call of syncdb signals.pre_syncdb.disconnect(pre_syncdb_receiver, sender=models) # We", "**kwargs): self.call_counter = self.call_counter + 1 self.call_args = kwargs class OneTimeReceiver(object): \"\"\" Special", "import signals from django.test import TestCase from django.core import management from django.utils import", "load_initial_data=False, stdout=six.StringIO()) args = r.call_args self.assertEqual(r.call_counter, 1) self.assertEqual(set(args), set(PRE_SYNCDB_ARGS)) self.assertEqual(args['app'], models) self.assertEqual(args['verbosity'], SYNCDB_VERBOSITY)", "class PreSyncdbReceiver(object): def __init__(self): self.call_counter = 0 self.call_args = None def __call__(self, signal,", "of them. \"\"\" def __init__(self): self.call_counter = 0 self.call_args = None def __call__(self," ]
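The count-then-disconnect pattern used by OneTimeReceiver above is not specific to pre_syncdb. Below is a minimal, self-contained sketch of the same idea with a plain django.dispatch.Signal; demo_signal, CountingReceiver and the object sender are made up for illustration and are not part of this test module:

from django.dispatch import Signal

demo_signal = Signal()  # hypothetical signal, only for illustration


class CountingReceiver(object):
    def __init__(self):
        self.calls = 0

    def __call__(self, sender, **kwargs):
        self.calls += 1
        # disconnect after the first call, mirroring OneTimeReceiver above
        demo_signal.disconnect(self, sender=sender)


receiver = CountingReceiver()
demo_signal.connect(receiver, sender=object)
demo_signal.send(sender=object)
demo_signal.send(sender=object)  # no effect: the receiver disconnected itself
assert receiver.calls == 1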
[ "use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4 numbers in", "it in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port)", "KIND, either express or implied. # See the License for the specific language", "Unless required by applicable law or agreed to in writing, software # distributed", "number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node = name + number", "if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node)", "= os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4 numbers in the job", "else: default_port = 12910 # ----------------------- # PORT NUMBER = MASTER_PORT # -----------------------", "# PORT NUMBER = MASTER_PORT # ----------------------- # in case the user passed", "this file except in compliance with the License. # You may obtain a", "Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"])", "return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) ->", "def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if", "SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True @staticmethod def detect() -> bool:", "str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self,", "the job id as the id default_port = default_port[-4:] # all ports should", "import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__)", "ANY KIND, either express or implied. # See the License for the specific", "import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a", "detect() -> bool: \"\"\"Returns ``True`` if the current process was launched on a", "NUMBER = MASTER_PORT # ----------------------- # in case the user passed it in", "----------------------- # in case the user passed it in if \"MASTER_PORT\" in os.environ:", "= number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node = name + number return", "number # ----------------------- # this way every process knows what port to use", "return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) ->", "called, but setting world size is not allowed. Ignored.\") def global_rank(self) -> int:", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is not allowed. 
Ignored.\") def", "logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster managed by SLURM.\"\"\"", "-> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called,", "OF ANY KIND, either express or implied. # See the License for the", "-> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is not allowed. Ignored.\")", "4 numbers in the job id as the id default_port = default_port[-4:] #", "Copyright The PyTorch Lightning team. # # Licensed under the Apache License, Version", "default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self)", "was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self)", "a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True @staticmethod", "def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def", "= root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int: # -----------------------", "os.environ @property def main_address(self) -> str: # figure out the root node addr", "re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for", "passed it in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] =", "addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node =", "and # limitations under the License. import logging import os import re from", "def detect() -> bool: \"\"\"Returns ``True`` if the current process was launched on", "def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank", "True @staticmethod def detect() -> bool: \"\"\"Returns ``True`` if the current process was", "= MASTER_PORT # ----------------------- # in case the user passed it in if", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "training on a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return", "default_port = default_port[-4:] # all ports should be in the 10k+ range default_port", "global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was", "self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int:", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "was called, but setting global rank is not allowed. Ignored.\") def local_rank(self) ->", "None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is not allowed. Ignored.\") def", "# limitations under the License. 
import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment", "maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0] number", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "should be in the 10k+ range default_port = int(default_port) + 15000 else: default_port", "in os.environ @property def main_address(self) -> str: # figure out the root node", "class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster managed by SLURM.\"\"\" @property", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "figure out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node =", "knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the", "if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\")", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "default_port[-4:] # all ports should be in the 10k+ range default_port = int(default_port)", "required by applicable law or agreed to in writing, software # distributed under", "name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number:", "main_port(self) -> int: # ----------------------- # SLURM JOB = PORT number # -----------------------", "applicable law or agreed to in writing, software # distributed under the License", "slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR:", "log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int: # ----------------------- # SLURM", "The PyTorch Lightning team. # # Licensed under the Apache License, Version 2.0", "or agreed to in writing, software # distributed under the License is distributed", "int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but", "to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4 numbers", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "the specific language governing permissions and # limitations under the License. 
import logging", "return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "writing, software # distributed under the License is distributed on an \"AS IS\"", "root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] =", "return root_node @property def main_port(self) -> int: # ----------------------- # SLURM JOB =", "int: # ----------------------- # SLURM JOB = PORT number # ----------------------- # this", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class", "----------------------- # SLURM JOB = PORT number # ----------------------- # this way every", "int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if \"[\" in root_node: name, numbers", "License. # You may obtain a copy of the License at # #", "# use the last 4 numbers in the job id as the id", "compliance with the License. # You may obtain a copy of the License", "size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is not", "environment for training on a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) ->", "id as the id default_port = default_port[-4:] # all ports should be in", "{os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int: # ----------------------- # SLURM JOB", "int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but", "for the specific language governing permissions and # limitations under the License. import", "main_address(self) -> str: # figure out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\")", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "default_port: # use the last 4 numbers in the job id as the", "specific language governing permissions and # limitations under the License. import logging import", "from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training", "= PORT number # ----------------------- # this way every process knows what port", "+ 15000 else: default_port = 12910 # ----------------------- # PORT NUMBER = MASTER_PORT", "-> str: if \"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number =", "-> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if \"[\" in", "not use this file except in compliance with the License. 
# You may", "the 10k+ range default_port = int(default_port) + 15000 else: default_port = 12910 #", "int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str:", "License, Version 2.0 (the \"License\"); # you may not use this file except", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size", "\")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\")", "if the current process was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in", "local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self,", "# you may not use this file except in compliance with the License.", "int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is not allowed.", "agreed to in writing, software # distributed under the License is distributed on", "port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4", "os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int:", "called, but setting global rank is not allowed. Ignored.\") def local_rank(self) -> int:", "10k+ range default_port = int(default_port) + 15000 else: default_port = 12910 # -----------------------", "# ----------------------- # in case the user passed it in if \"MASTER_PORT\" in", "(the \"License\"); # you may not use this file except in compliance with", "in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT:", "= 12910 # ----------------------- # PORT NUMBER = MASTER_PORT # ----------------------- # in", "-> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called,", "def creates_processes_externally(self) -> bool: return True @staticmethod def detect() -> bool: \"\"\"Returns ``True``", "@property def creates_processes_externally(self) -> bool: return True @staticmethod def detect() -> bool: \"\"\"Returns", "-> str: # figure out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if", "this way every process knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if", "# Unless required by applicable law or agreed to in writing, software #", "by applicable law or agreed to in writing, software # distributed under the", "bool: \"\"\"Returns ``True`` if the current process was launched on a SLURM cluster.\"\"\"", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0]", "numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\",", "str: # figure out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist:", 
"root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node", "Lightning team. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node", "file except in compliance with the License. # You may obtain a copy", "root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else:", "on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str:", "str: if \"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\",", "License for the specific language governing permissions and # limitations under the License.", "world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was", "# all ports should be in the 10k+ range default_port = int(default_port) +", "range default_port = int(default_port) + 15000 else: default_port = 12910 # ----------------------- #", "cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True @staticmethod def", "set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is", "to in writing, software # distributed under the License is distributed on an", "in the 10k+ range default_port = int(default_port) + 15000 else: default_port = 12910", "implied. # See the License for the specific language governing permissions and #", "\"-\" in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node =", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\"", "size is not allowed. Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self,", "the current process was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ", "slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\"", "# in case the user passed it in if \"MASTER_PORT\" in os.environ: default_port", "not allowed. Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int)", "cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str: # figure out", "is not allowed. Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) ->", "by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True @staticmethod def detect() ->", "team. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "or implied. 
# See the License for the specific language governing permissions and", "= numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\",", "= os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node", "License. import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log =", "number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0] number =", "def main_address(self) -> str: # figure out the root node addr slurm_nodelist =", "15000 else: default_port = 12910 # ----------------------- # PORT NUMBER = MASTER_PORT #", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "OR CONDITIONS OF ANY KIND, either express or implied. # See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "-> bool: \"\"\"Returns ``True`` if the current process was launched on a SLURM", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "def main_port(self) -> int: # ----------------------- # SLURM JOB = PORT number #", "def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank", "= os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) ->", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "\"\"\"Cluster environment for training on a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self)", "-> int: # ----------------------- # SLURM JOB = PORT number # ----------------------- #", "for training on a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool:", "current process was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property", "# Copyright The PyTorch Lightning team. # # Licensed under the Apache License,", "----------------------- # this way every process knows what port to use default_port =", "\"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if", "return \"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str: # figure out the", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. 
#", "on a cluster managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True", "os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster", "if default_port: # use the last 4 numbers in the job id as", "int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is not allowed.", "-> bool: return True @staticmethod def detect() -> bool: \"\"\"Returns ``True`` if the", "= self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) ->", "use this file except in compliance with the License. # You may obtain", "ports should be in the 10k+ range default_port = int(default_port) + 15000 else:", "= logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster managed by", "was called, but setting world size is not allowed. Ignored.\") def global_rank(self) ->", "job id as the id default_port = default_port[-4:] # all ports should be", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return", "as the id default_port = default_port[-4:] # all ports should be in the", "user passed it in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"]", "2.0 (the \"License\"); # you may not use this file except in compliance", "os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4 numbers in the job id", "numbers in the job id as the id default_port = default_port[-4:] # all", "in case the user passed it in if \"MASTER_PORT\" in os.environ: default_port =", "the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0]", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "but setting global rank is not allowed. Ignored.\") def local_rank(self) -> int: return", "resolve_root_node_address(self, root_node: str) -> str: if \"[\" in root_node: name, numbers = root_node.split(\"[\",", "Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None:", "number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node = name +", "# ----------------------- # SLURM JOB = PORT number # ----------------------- # this way", "way every process knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port:", "rank is not allowed. Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self)", "-> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is not allowed. Ignored.\")", "# # Unless required by applicable law or agreed to in writing, software", "log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster managed", "default_port = 12910 # ----------------------- # PORT NUMBER = MASTER_PORT # ----------------------- #", "express or implied. 
# See the License for the specific language governing permissions", "global rank is not allowed. Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def", "os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def", "world size is not allowed. Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def", "set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is", "int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None:", "either express or implied. # See the License for the specific language governing", "allowed. Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) ->", "in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node = name", "int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world", "governing permissions and # limitations under the License. import logging import os import", "----------------------- # PORT NUMBER = MASTER_PORT # ----------------------- # in case the user", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"]", "default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last 4 numbers in the", "id default_port = default_port[-4:] # all ports should be in the 10k+ range", "int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str)", "PORT number # ----------------------- # this way every process knows what port to", "but setting world size is not allowed. Ignored.\") def global_rank(self) -> int: return", "\"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return", "use the last 4 numbers in the job id as the id default_port", "root_node: str) -> str: if \"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1)", "language governing permissions and # limitations under the License. import logging import os", "process was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def", "the License. 
# You may obtain a copy of the License at #", "int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if \"[\" in root_node:", "if \"-\" in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node", "root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self)", "case the user passed it in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"]", "12910 # ----------------------- # PORT NUMBER = MASTER_PORT # ----------------------- # in case", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "is not allowed. Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank:", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "= \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property", "\"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str: # figure out the root", "int(default_port) + 15000 else: default_port = 12910 # ----------------------- # PORT NUMBER =", "setting world size is not allowed. Ignored.\") def global_rank(self) -> int: return int(os.environ[\"SLURM_PROCID\"])", "root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in", "a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str: #", "def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size", "SLURM JOB = PORT number # ----------------------- # this way every process knows", "os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int: #", "@property def main_port(self) -> int: # ----------------------- # SLURM JOB = PORT number", "int(os.environ[\"SLURM_PROCID\"]) def set_global_rank(self, rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global", "with the License. # You may obtain a copy of the License at", "log.debug(\"SLURMEnvironment.set_world_size was called, but setting world size is not allowed. 
Ignored.\") def global_rank(self)", "return True @staticmethod def detect() -> bool: \"\"\"Returns ``True`` if the current process", "the id default_port = default_port[-4:] # all ports should be in the 10k+", "root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def main_port(self) -> int: # ----------------------- #", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "be in the 10k+ range default_port = int(default_port) + 15000 else: default_port =", "= slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node", "= default_port[-4:] # all ports should be in the 10k+ range default_port =", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "str) -> str: if \"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number", "return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if \"[\" in root_node: name,", "# this way every process knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\")", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "creates_processes_externally(self) -> bool: return True @staticmethod def detect() -> bool: \"\"\"Returns ``True`` if", "what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use the last", "@property def main_address(self) -> str: # figure out the root node addr slurm_nodelist", "limitations under the License. import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import", "rank: int) -> None: log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is not", "maxsplit=1)[0] if \"-\" in number: number = number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number)", "in compliance with the License. # You may obtain a copy of the", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "the last 4 numbers in the job id as the id default_port =", "in os.environ: default_port = os.environ[\"MASTER_PORT\"] else: os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port)", "process knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: # use", "permissions and # limitations under the License. 
import logging import os import re", "See the License for the specific language governing permissions and # limitations under", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "= root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number =", "import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment", "all ports should be in the 10k+ range default_port = int(default_port) + 15000", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "PORT NUMBER = MASTER_PORT # ----------------------- # in case the user passed it", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "# ----------------------- # this way every process knows what port to use default_port", "last 4 numbers in the job id as the id default_port = default_port[-4:]", "the License. import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log", "log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size:", "not allowed. Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int:", "out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\"", "managed by SLURM.\"\"\" @property def creates_processes_externally(self) -> bool: return True @staticmethod def detect()", "node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node: str) -> str: if \"[\"", "bool: return True @staticmethod def detect() -> bool: \"\"\"Returns ``True`` if the current", "return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int) -> None: log.debug(\"SLURMEnvironment.set_world_size was called, but setting", "root_node @property def main_port(self) -> int: # ----------------------- # SLURM JOB = PORT", "\"\"\"Returns ``True`` if the current process was launched on a SLURM cluster.\"\"\" return", "{os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def set_world_size(self, size: int)", "\"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return root_node @property def", "log.debug(\"SLURMEnvironment.set_global_rank was called, but setting global rank is not allowed. Ignored.\") def local_rank(self)", "Version 2.0 (the \"License\"); # you may not use this file except in", "except in compliance with the License. 
# You may obtain a copy of", "number.split(\"-\")[0] number = re.sub(\"[^0-9]\", \"\", number) root_node = name + number return root_node", "os.environ[\"MASTER_PORT\"] = str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"])", "default_port = int(default_port) + 15000 else: default_port = 12910 # ----------------------- # PORT", "SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self) -> str: # figure", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "MASTER_PORT # ----------------------- # in case the user passed it in if \"MASTER_PORT\"", "= str(default_port) log.debug(f\"MASTER_PORT: {os.environ['MASTER_PORT']}\") return int(default_port) def world_size(self) -> int: return int(os.environ[\"SLURM_NTASKS\"]) def", "else: root_node = \"127.0.0.1\" root_node = self.resolve_root_node_address(root_node) os.environ[\"MASTER_ADDR\"] = root_node log.debug(f\"MASTER_ADDR: {os.environ['MASTER_ADDR']}\") return", "@staticmethod def detect() -> bool: \"\"\"Returns ``True`` if the current process was launched", "``True`` if the current process was launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\"", "numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0] if \"-\" in number: number", "JOB = PORT number # ----------------------- # this way every process knows what", "launched on a SLURM cluster.\"\"\" return \"SLURM_NTASKS\" in os.environ @property def main_address(self) ->", "-> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return int(os.environ[\"SLURM_NODEID\"]) def resolve_root_node_address(self, root_node:", "PyTorch Lightning team. # # Licensed under the Apache License, Version 2.0 (the", "pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on", "ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster", "# SLURM JOB = PORT number # ----------------------- # this way every process", "# figure out the root node addr slurm_nodelist = os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node", "os.environ.get(\"SLURM_NODELIST\") if slurm_nodelist: root_node = slurm_nodelist.split(\" \")[0].split(\",\")[0] else: root_node = \"127.0.0.1\" root_node =", "if \"[\" in root_node: name, numbers = root_node.split(\"[\", maxsplit=1) number = numbers.split(\",\", maxsplit=1)[0]", "def resolve_root_node_address(self, root_node: str) -> str: if \"[\" in root_node: name, numbers =", "the user passed it in if \"MASTER_PORT\" in os.environ: default_port = os.environ[\"MASTER_PORT\"] else:", "allowed. 
Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"]) def node_rank(self) -> int: return", "import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment log = logging.getLogger(__name__) class SLURMEnvironment(ClusterEnvironment):", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "= int(default_port) + 15000 else: default_port = 12910 # ----------------------- # PORT NUMBER", "under the License. import logging import os import re from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment", "in the job id as the id default_port = default_port[-4:] # all ports", "every process knows what port to use default_port = os.environ.get(\"SLURM_JOB_ID\") if default_port: #", "setting global rank is not allowed. Ignored.\") def local_rank(self) -> int: return int(os.environ[\"SLURM_LOCALID\"])", "SLURMEnvironment(ClusterEnvironment): \"\"\"Cluster environment for training on a cluster managed by SLURM.\"\"\" @property def", "# ----------------------- # PORT NUMBER = MASTER_PORT # ----------------------- # in case the" ]
[ "import os import math from trackball import TrackBall print(\"\"\"Trackball: Mouse Use the trackball", "right, switch, state = trackball.read() # Send movements and clicks to xte if", "<filename>examples/mouse.py #!/usr/bin/env python import time import os import math from trackball import TrackBall", "sudo apt install xautomation?\") while True: up, down, left, right, switch, state =", "left, right, switch, state = trackball.read() # Send movements and clicks to xte", "time import os import math from trackball import TrackBall print(\"\"\"Trackball: Mouse Use the", "as a mouse in Raspbian, with right-click when the switch is pressed. Press", "while True: up, down, left, right, switch, state = trackball.read() # Send movements", "you sudo apt install xautomation?\") while True: up, down, left, right, switch, state", "mouse in Raspbian, with right-click when the switch is pressed. Press Ctrl+C to", "= trackball.read() # Send movements and clicks to xte if switch: cmd =", "to control mouse) use_xte = os.system('which xte') == 0 if use_xte == 0:", "import TrackBall print(\"\"\"Trackball: Mouse Use the trackball as a mouse in Raspbian, with", "the switch is pressed. Press Ctrl+C to exit! \"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0,", "== 0 if use_xte == 0: raise RuntimeError(\"xte not found. Did you sudo", "xte if switch: cmd = 'xte \"mouseclick 1\"' os.system(cmd) elif right or up", "right - left x = math.copysign(x**2, x) y = down - up y", "- left x = math.copysign(x**2, x) y = down - up y =", "0) # Check for xte (used to control mouse) use_xte = os.system('which xte')", "= os.system('which xte') == 0 if use_xte == 0: raise RuntimeError(\"xte not found.", "0, 0) # Check for xte (used to control mouse) use_xte = os.system('which", "1\"' os.system(cmd) elif right or up or left or down: x = right", "raise RuntimeError(\"xte not found. Did you sudo apt install xautomation?\") while True: up,", "a mouse in Raspbian, with right-click when the switch is pressed. Press Ctrl+C", "use_xte = os.system('which xte') == 0 if use_xte == 0: raise RuntimeError(\"xte not", "left x = math.copysign(x**2, x) y = down - up y = math.copysign(y**2,", "0 if use_xte == 0: raise RuntimeError(\"xte not found. Did you sudo apt", "TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0) # Check for xte (used to control mouse)", "Check for xte (used to control mouse) use_xte = os.system('which xte') == 0", "= right - left x = math.copysign(x**2, x) y = down - up", "= 'xte \"mouseclick 1\"' os.system(cmd) elif right or up or left or down:", "# Send movements and clicks to xte if switch: cmd = 'xte \"mouseclick", "= math.copysign(x**2, x) y = down - up y = math.copysign(y**2, y) cmd", "# Check for xte (used to control mouse) use_xte = os.system('which xte') ==", "RuntimeError(\"xte not found. Did you sudo apt install xautomation?\") while True: up, down,", "== 0: raise RuntimeError(\"xte not found. Did you sudo apt install xautomation?\") while", "os.system('which xte') == 0 if use_xte == 0: raise RuntimeError(\"xte not found. Did", "switch, state = trackball.read() # Send movements and clicks to xte if switch:", "xte (used to control mouse) use_xte = os.system('which xte') == 0 if use_xte", "left or down: x = right - left x = math.copysign(x**2, x) y", "#!/usr/bin/env python import time import os import math from trackball import TrackBall print(\"\"\"Trackball:", "when the switch is pressed. Press Ctrl+C to exit! \"\"\") trackball = TrackBall(interrupt_pin=4)", "pressed. Press Ctrl+C to exit! 
\"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0)", "= down - up y = math.copysign(y**2, y) cmd = 'xte \"mousermove {}", "Press Ctrl+C to exit! \"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0) #", "down - up y = math.copysign(y**2, y) cmd = 'xte \"mousermove {} {}\"'.format(int(x),", "found. Did you sudo apt install xautomation?\") while True: up, down, left, right,", "if use_xte == 0: raise RuntimeError(\"xte not found. Did you sudo apt install", "TrackBall print(\"\"\"Trackball: Mouse Use the trackball as a mouse in Raspbian, with right-click", "Send movements and clicks to xte if switch: cmd = 'xte \"mouseclick 1\"'", "trackball import TrackBall print(\"\"\"Trackball: Mouse Use the trackball as a mouse in Raspbian,", "math.copysign(x**2, x) y = down - up y = math.copysign(y**2, y) cmd =", "(used to control mouse) use_xte = os.system('which xte') == 0 if use_xte ==", "apt install xautomation?\") while True: up, down, left, right, switch, state = trackball.read()", "to exit! \"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0) # Check for", "use_xte == 0: raise RuntimeError(\"xte not found. Did you sudo apt install xautomation?\")", "control mouse) use_xte = os.system('which xte') == 0 if use_xte == 0: raise", "= TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0) # Check for xte (used to control", "from trackball import TrackBall print(\"\"\"Trackball: Mouse Use the trackball as a mouse in", "math from trackball import TrackBall print(\"\"\"Trackball: Mouse Use the trackball as a mouse", "trackball.set_rgbw(0, 0, 0, 0) # Check for xte (used to control mouse) use_xte", "'xte \"mouseclick 1\"' os.system(cmd) elif right or up or left or down: x", "x = math.copysign(x**2, x) y = down - up y = math.copysign(y**2, y)", "the trackball as a mouse in Raspbian, with right-click when the switch is", "install xautomation?\") while True: up, down, left, right, switch, state = trackball.read() #", "trackball as a mouse in Raspbian, with right-click when the switch is pressed.", "mouse) use_xte = os.system('which xte') == 0 if use_xte == 0: raise RuntimeError(\"xte", "right or up or left or down: x = right - left x", "right-click when the switch is pressed. Press Ctrl+C to exit! \"\"\") trackball =", "to xte if switch: cmd = 'xte \"mouseclick 1\"' os.system(cmd) elif right or", "and clicks to xte if switch: cmd = 'xte \"mouseclick 1\"' os.system(cmd) elif", "xautomation?\") while True: up, down, left, right, switch, state = trackball.read() # Send", "import time import os import math from trackball import TrackBall print(\"\"\"Trackball: Mouse Use", "or left or down: x = right - left x = math.copysign(x**2, x)", "down: x = right - left x = math.copysign(x**2, x) y = down", "switch is pressed. Press Ctrl+C to exit! \"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0,", "y = down - up y = math.copysign(y**2, y) cmd = 'xte \"mousermove", "Use the trackball as a mouse in Raspbian, with right-click when the switch", "exit! \"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0) # Check for xte", "cmd = 'xte \"mouseclick 1\"' os.system(cmd) elif right or up or left or", "0: raise RuntimeError(\"xte not found. 
Did you sudo apt install xautomation?\") while True:", "if switch: cmd = 'xte \"mouseclick 1\"' os.system(cmd) elif right or up or", "0, 0, 0) # Check for xte (used to control mouse) use_xte =", "- up y = math.copysign(y**2, y) cmd = 'xte \"mousermove {} {}\"'.format(int(x), int(y))", "\"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0) # Check for xte (used", "\"mouseclick 1\"' os.system(cmd) elif right or up or left or down: x =", "os import math from trackball import TrackBall print(\"\"\"Trackball: Mouse Use the trackball as", "os.system(cmd) elif right or up or left or down: x = right -", "up y = math.copysign(y**2, y) cmd = 'xte \"mousermove {} {}\"'.format(int(x), int(y)) os.system(cmd)", "down, left, right, switch, state = trackball.read() # Send movements and clicks to", "import math from trackball import TrackBall print(\"\"\"Trackball: Mouse Use the trackball as a", "is pressed. Press Ctrl+C to exit! \"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0,", "trackball.read() # Send movements and clicks to xte if switch: cmd = 'xte", "print(\"\"\"Trackball: Mouse Use the trackball as a mouse in Raspbian, with right-click when", "with right-click when the switch is pressed. Press Ctrl+C to exit! \"\"\") trackball", "Ctrl+C to exit! \"\"\") trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0) # Check", "up or left or down: x = right - left x = math.copysign(x**2,", "not found. Did you sudo apt install xautomation?\") while True: up, down, left,", "Did you sudo apt install xautomation?\") while True: up, down, left, right, switch,", "y = math.copysign(y**2, y) cmd = 'xte \"mousermove {} {}\"'.format(int(x), int(y)) os.system(cmd) time.sleep(0.0001)", "elif right or up or left or down: x = right - left", "switch: cmd = 'xte \"mouseclick 1\"' os.system(cmd) elif right or up or left", "state = trackball.read() # Send movements and clicks to xte if switch: cmd", "or up or left or down: x = right - left x =", "x) y = down - up y = math.copysign(y**2, y) cmd = 'xte", "Mouse Use the trackball as a mouse in Raspbian, with right-click when the", "in Raspbian, with right-click when the switch is pressed. Press Ctrl+C to exit!", "up, down, left, right, switch, state = trackball.read() # Send movements and clicks", "python import time import os import math from trackball import TrackBall print(\"\"\"Trackball: Mouse", "movements and clicks to xte if switch: cmd = 'xte \"mouseclick 1\"' os.system(cmd)", "or down: x = right - left x = math.copysign(x**2, x) y =", "Raspbian, with right-click when the switch is pressed. Press Ctrl+C to exit! \"\"\")", "True: up, down, left, right, switch, state = trackball.read() # Send movements and", "clicks to xte if switch: cmd = 'xte \"mouseclick 1\"' os.system(cmd) elif right", "xte') == 0 if use_xte == 0: raise RuntimeError(\"xte not found. Did you", "for xte (used to control mouse) use_xte = os.system('which xte') == 0 if", "trackball = TrackBall(interrupt_pin=4) trackball.set_rgbw(0, 0, 0, 0) # Check for xte (used to", "x = right - left x = math.copysign(x**2, x) y = down -" ]
[ "layer(s). The function should return a tf.Tensor. learn_std (bool): Is std trainable. init_std", "Initial value for std. adaptive_std (bool): Is std a neural network. If False,", "should return a tf.Tensor. hidden_b_init (callable): Initializer function for the bias of intermediate", "is at most the value of max_std, to avoid numerical issues. std_hidden_nonlinearity (callable):", "function should return a tf.Tensor. learn_std (bool): Is std trainable. init_std (float): Initial", "std_parameterization (str): How the std should be parametrized. There are two options: -", "input placeholder(s). Args: state_input (tf.Tensor): Place holder for state input. name (str): Inner", "share the same network. std_hidden_sizes (list[int]): Output dimension of dense layer(s) for the", "the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor = self.__class__(", "two options: - exp: the logarithm of the std will be stored, and", "hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity,", "the configuration and parameters of the primitive. Args: name (str): Name of the", "hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False,", "it will be a parameter. std_share_network (bool): Boolean for whether mean and std", "example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std.", "softplus: the std will be computed as log(1+exp(x)) layer_normalization (bool): Bool for using", "computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape,", "hidden_b_init (callable): Initializer function for the bias of intermediate dense layer(s). The function", "# Since regressor expects [N, *dims], we need to squeeze the extra #", "input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(),", "[ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def", "for output dense layer. It should return a tf.Tensor. Set it to None", "MLP consists of two hidden layers, each with 32 hidden units. 
min_std (float):", "vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var,", "to be different from source model if cloned under the same computational graph.", "y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var',", "garage.tf.models.Model class. This class can be used to perform regression by fitting a", "units. hidden_nonlinearity (callable): Activation function for intermediate dense layer(s). It should return a", "network_output_spec(self): \"\"\"Network output spec. Return: list[str]: List of key(str) for the network outputs.", "Activation function for intermediate dense layer(s). It should return a tf.Tensor. Set it", "tf.Tensor. Set it to None to maintain a linear activation. std_output_w_init (callable): Initializer", "(callable): Activation function for intermediate dense layer(s). It should return a tf.Tensor. Set", "deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This", "are two options: - exp: the logarithm of the std will be stored,", "we need to squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with", "hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes,", "clone(self, name): \"\"\"Return a clone of the model. It copies the configuration and", "for std. adaptive_std (bool): Is std a neural network. If False, it will", "Is std trainable. init_std (float): Initial value for std. adaptive_std (bool): Is std", "state_input (tf.Tensor): Place holder for state input. name (str): Inner model name, also", "cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor", "Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla", "max_std, to avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in", "a exponential transformation - softplus: the std will be computed as log(1+exp(x)) layer_normalization", "1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var", "32 hidden units. hidden_nonlinearity (callable): Activation function for intermediate dense layer(s). 
It should", "y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input -", "std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity,", "the weight of intermediate dense layer(s). The function should return a tf.Tensor. hidden_b_init", "from garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on", "output dense layer(s). The function should return a tf.Tensor. output_b_init (callable): Initializer function", "min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): \"\"\"Network", "of the training data. output_dim (int): Output dimension of the model. name (str):", "function should return a tf.Tensor. output_b_init (callable): Initializer function for the bias of", "a tf.Tensor. output_b_init (callable): Initializer function for the bias of output dense layer(s).", "a parameter. std_share_network (bool): Boolean for whether mean and std share the same", "layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network,", "= tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable(", "= tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var + y_mean_var with", "to maintain a linear activation. output_w_init (callable): Initializer function for the weight of", "function for the bias of output dense layer(s). The function should return a", "function for the weight of output dense layer(s) in the std network. std_parameterization", "for the network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std',", "Initializer function for the weight of output dense layer(s). The function should return", "std_hidden_b_init (callable): Initializer function for the bias of intermediate dense layer(s) in the", "(callable): Initializer function for the bias of intermediate dense layer(s) in the std", "of output dense layer(s). The function should return a tf.Tensor. 
learn_std (bool): Is", "dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var", "bias of output dense layer(s). The function should return a tf.Tensor. learn_std (bool):", "graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim,", "dense layer(s). The function should return a tf.Tensor. learn_std (bool): Is std trainable.", "garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity,", "to None to maintain a linear activation. hidden_w_init (callable): Initializer function for the", "Initializer function for the bias of output dense layer(s). The function should return", "dense layer(s) for the MLP for std. For example, (32, 32) means the", "function for the bias of intermediate dense layer(s). The function should return a", "import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This class can be", "the std will be computed as log(1+exp(x)) layer_normalization (bool): Bool for using layer", "input. name (str): Inner model name, also the variable scope of the inner", "be used to perform regression by fitting a Gaussian distribution to the outputs.", "Place holder for state input. name (str): Inner model name, also the variable", "+ self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(),", "a linear activation. hidden_w_init (callable): Initializer function for the weight of intermediate dense", "in the std network. std_hidden_w_init (callable): Initializer function for the weight of intermediate", "trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input", "x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a clone of the model.", "to maintain a linear activation. std_output_w_init (callable): Initializer function for the weight of", "list[str]: List of key(str) for the network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean',", "loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var)", "If not None, the std is at least the value of min_std, to", "function for output dense layer in the std network. It should return a", "= input_shape def network_output_spec(self): \"\"\"Network output spec. Return: list[str]: List of key(str) for", "a linear activation. output_w_init (callable): Initializer function for the weight of output dense", "std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): \"\"\"Network output spec. 
Return:", "the same network. std_hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP", "* y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist =", "function for output dense layer. It should return a tf.Tensor. Set it to", "(float): If not None, the std is at most the value of max_std,", "self._input_shape = input_shape def network_output_spec(self): \"\"\"Network output spec. Return: list[str]: List of key(str)", "placeholder(s). Args: state_input (tf.Tensor): Place holder for state input. name (str): Inner model", "min_std, to avoid numerical issues. max_std (float): If not None, the std is", "is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag:", "least the value of min_std, to avoid numerical issues. max_std (float): If not", "maintain a linear activation. std_output_w_init (callable): Initializer function for the weight of output", "= self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std,", "Since regressor expects [N, *dims], we need to squeeze the extra # dimension", "layer in the std network. std_hidden_w_init (callable): Initializer function for the weight of", "regressor expects [N, *dims], we need to squeeze the extra # dimension normalized_dist_log_std", "std. adaptive_std (bool): Is std a neural network. If False, it will be", "std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init,", "the std network. std_output_nonlinearity (callable): Activation function for output dense layer in the", "if cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\"", "self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False)", "computed as log(1+exp(x)) layer_normalization (bool): Bool for using layer normalization or not. \"\"\"", "activation. hidden_w_init (callable): Initializer function for the weight of intermediate dense layer(s). The", "If False, it will be a parameter. std_share_network (bool): Boolean for whether mean", "dimension of dense layer(s) for the MLP for std. For example, (32, 32)", "in the std network. It should return a tf.Tensor. Set it to None", "training data. output_dim (int): Output dimension of the model. name (str): Model name,", "None to maintain a linear activation. 
std_output_w_init (callable): Initializer function for the weight", "+ self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape,", "Set it to None to maintain a linear activation. hidden_w_init (callable): Initializer function", "std will be computed as log(1+exp(x)) layer_normalization (bool): Bool for using layer normalization", "layer(s) in the std network. std_output_nonlinearity (callable): Activation function for output dense layer", "parametrized. There are two options: - exp: the logarithm of the std will", "tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var,", "the variable scope. hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP", "should return a tf.Tensor. output_nonlinearity (callable): Activation function for output dense layer. It", "to avoid numerical issues. max_std (float): If not None, the std is at", "output dense layer. It should return a tf.Tensor. Set it to None to", "np import tensorflow as tf import tensorflow_probability as tfp from garage.experiment import deterministic", "std a neural network. If False, it will be a parameter. std_share_network (bool):", "Name of the newly created model. It has to be different from source", "the MLP for mean. For example, (32, 32) means the MLP consists of", "network. std_parameterization (str): How the std should be parametrized. There are two options:", "name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std,", "function for the bias of intermediate dense layer(s) in the std network. std_output_nonlinearity", "output_dim (int): Output dimension of the model. name (str): Model name, also the", "clone of the model. It copies the configuration and parameters of the primitive.", "newly created model. It has to be different from source model if cloned", "as tf import tensorflow_probability as tfp from garage.experiment import deterministic from garage.tf.models import", "return a tf.Tensor. Set it to None to maintain a linear activation. hidden_w_init", "network. If False, it will be a parameter. std_share_network (bool): Boolean for whether", "network. std_hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP for std.", "hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std,", "and parameters of the primitive. Args: name (str): Name of the newly created", "same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor = self.__class__( name=name,", "copies the configuration and parameters of the primitive. 
Args: name (str): Name of", "max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim,", "self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var _, normalized_dist_mean,", "dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(),", "of dense layer(s) for the MLP for mean. For example, (32, 32) means", "stored, and applied a exponential transformation - softplus: the std will be computed", "seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32,", "tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for", "log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag(", "key(str) for the network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean',", "consists of two hidden layers, each with 32 hidden units. hidden_nonlinearity (callable): Activation", "normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var + y_mean_var", "This class can be used to perform regression by fitting a Gaussian distribution", "std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def", "input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std,", "(callable): Initializer function for the weight of intermediate dense layer(s). The function should", "(str): How the std should be parametrized. There are two options: - exp:", "a tf.Tensor. learn_std (bool): Is std trainable. init_std (float): Initial value for std.", "if exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean.", "(list[int]): Output dimension of dense layer(s) for the MLP for std. 
For example,", "self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std,", "of the std will be stored, and applied a exponential transformation - softplus:", "dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std", "There are two options: - exp: the logarithm of the std will be", "data. output_dim (int): Output dimension of the model. name (str): Model name, also", "for the bias of output dense layer(s). The function should return a tf.Tensor.", "data. tf.Tensor: Mean for label. tf.Tensor: log_std for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var", "tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for data. tf.Tensor: Mean", "the model. It copies the configuration and parameters of the primitive. Args: name", "garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla", "std_share_network (bool): Boolean for whether mean and std share the same network. std_hidden_sizes", "the newly created model. It has to be different from source model if", "the bias of intermediate dense layer(s) in the std network. std_output_nonlinearity (callable): Activation", "should return a tf.Tensor. Set it to None to maintain a linear activation.", "two hidden layers, each with 32 hidden units. min_std (float): If not None,", "32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0,", "not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(),", "name): \"\"\"Return a clone of the model. It copies the configuration and parameters", "model. It copies the configuration and parameters of the primitive. Args: name (str):", "Mean for data. tf.Tensor: log_std for data. tf.Tensor: Mean for label. tf.Tensor: log_std", "for the weight of output dense layer(s) in the std network. std_parameterization (str):", "import tensorflow as tf import tensorflow_probability as tfp from garage.experiment import deterministic from", "min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False):", "mean and std share the same network. 
std_hidden_sizes (list[int]): Output dimension of dense", "log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for", "Model name, also the variable scope. hidden_sizes (list[int]): Output dimension of dense layer(s)", "seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std,", "consists of two hidden layers, each with 32 hidden units. min_std (float): If", "the std is at least the value of min_std, to avoid numerical issues.", "(callable): Initializer function for the weight of output dense layer(s). The function should", "It should return a tf.Tensor. Set it to None to maintain a linear", "std_output_w_init (callable): Initializer function for the weight of output dense layer(s) in the", "'y_std' ] def _build(self, state_input, name=None): \"\"\"Build model given input placeholder(s). Args: state_input", "neural network. If False, it will be a parameter. std_share_network (bool): Boolean for", "hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6,", "dense layer in the std network. It should return a tf.Tensor. Set it", "of the model. name (str): Model name, also the variable scope. hidden_sizes (list[int]):", "Activation function for output dense layer in the std network. It should return", "tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var',", "the outputs. Args: input_shape (tuple[int]): Input shape of the training data. output_dim (int):", "two hidden layers, each with 32 hidden units. hidden_nonlinearity (callable): Activation function for", "extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean *", "function for the weight of intermediate dense layer(s) in the std network. std_hidden_b_init", "adaptive_std (bool): Is std a neural network. If False, it will be a", "= tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable(", "x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a clone of the model. It", "y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a clone of the model. 
It copies", "\"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init,", "std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters = self.parameters return new_regressor", "output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform(", "MLP for mean. For example, (32, 32) means the MLP consists of two", "weight of output dense layer(s) in the std network. std_parameterization (str): How the", "tf.Tensor: Mean for label. tf.Tensor: log_std for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var =", "= tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var,", "The function should return a tf.Tensor. learn_std (bool): Is std trainable. init_std (float):", "network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std',", "mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std.", "name (str): Name of the newly created model. It has to be different", "init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self):", "linear activation. output_w_init (callable): Initializer function for the weight of output dense layer(s).", "dense layer(s) in the std network. std_parameterization (str): How the std should be", "initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False)", "Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla", "_, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor expects [N, *dims], we", "be different from source model if cloned under the same computational graph. Returns:", "the MLP consists of two hidden layers, each with 32 hidden units. 
hidden_nonlinearity", "dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var", "return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ]", "normalization or not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform(", "tensorflow_probability as tfp from garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel):", "(normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name):", "the variable scope of the inner model, if exist. One example is garage.tf.models.Sequential.", "Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution.", "should return a tf.Tensor. learn_std (bool): Is std trainable. init_std (float): Initial value", "of the inner model, if exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed", "Args: input_shape (tuple[int]): Input shape of the training data. output_dim (int): Output dimension", "std will be stored, and applied a exponential transformation - softplus: the std", "model if cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model.", "normalized_xs_var) # Since regressor expects [N, *dims], we need to squeeze the extra", "GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This class can be used to perform", "tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist,", "max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): \"\"\"Network output", "for data. tf.Tensor: log_std for data. tf.Tensor: Mean for label. tf.Tensor: log_std for", "the MLP for std. For example, (32, 32) means the MLP consists of", "for label. tf.Tensor: log_std for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean',", "under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor =", "maintain a linear activation. output_w_init (callable): Initializer function for the weight of output", "layer(s). It should return a tf.Tensor. Set it to None to maintain a", "tf.Tensor: log_std for data. tf.Tensor: Mean for label. tf.Tensor: log_std for label. \"\"\"", "maintain a linear activation. 
hidden_w_init (callable): Initializer function for the weight of intermediate", "the std is at most the value of max_std, to avoid numerical issues.", "will be a parameter. std_share_network (bool): Boolean for whether mean and std share", "layer(s). The function should return a tf.Tensor. output_nonlinearity (callable): Activation function for output", "garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This class can", "std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std,", "it to None to maintain a linear activation. std_output_w_init (callable): Initializer function for", "to perform regression by fitting a Gaussian distribution to the outputs. Args: input_shape", "Returns: garage.tf.policies.GaussianMLPModel: Newly cloned model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes,", "the value of max_std, to avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each", "class can be used to perform regression by fitting a Gaussian distribution to", "init_std (float): Initial value for std. adaptive_std (bool): Is std a neural network.", "Output dimension of the model. name (str): Model name, also the variable scope.", "for state input. name (str): Inner model name, also the variable scope of", "super()._build( normalized_xs_var) # Since regressor expects [N, *dims], we need to squeeze the", "initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var =", "Initializer function for the weight of intermediate dense layer(s) in the std network.", "layer(s). The function should return a tf.Tensor. hidden_b_init (callable): Initializer function for the", "std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init,", "y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag(", "std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters", "Args: state_input (tf.Tensor): Place holder for state input. 
name (str): Inner model name,", ") + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) +", "it to None to maintain a linear activation. hidden_w_init (callable): Initializer function for", "+ y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean,", "and applied a exponential transformation - softplus: the std will be computed as", "in the std network. std_parameterization (str): How the std should be parametrized. There", "intermediate dense layer(s) in the std network. std_output_nonlinearity (callable): Activation function for output", "for the bias of intermediate dense layer(s) in the std network. std_output_nonlinearity (callable):", "std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): \"\"\"Network output spec. Return: list[str]:", "applied a exponential transformation - softplus: the std will be computed as log(1+exp(x))", "primitive. Args: name (str): Name of the newly created model. It has to", "adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform(", "Initializer function for the bias of intermediate dense layer(s). The function should return", "tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std", "log_std for data. tf.Tensor: Mean for label. tf.Tensor: log_std for label. \"\"\" with", "Gaussian distribution to the outputs. Args: input_shape (tuple[int]): Input shape of the training", "numpy as np import tensorflow as tf import tensorflow_probability as tfp from garage.experiment", "each with 32 hidden units. min_std (float): If not None, the std is", "the std network. std_hidden_w_init (callable): Initializer function for the weight of intermediate dense", "name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False,", "intermediate dense layer(s). It should return a tf.Tensor. Set it to None to", "not None, the std is at most the value of max_std, to avoid", "of the newly created model. It has to be different from source model", "layer. It should return a tf.Tensor. Set it to None to maintain a", "Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for data. 
tf.Tensor: Mean for", "output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init,", "output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape", "in the std network. std_output_nonlinearity (callable): Activation function for output dense layer in", "model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity,", ") + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32,", "max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters = self.parameters return", "of intermediate dense layer(s). The function should return a tf.Tensor. output_nonlinearity (callable): Activation", "bias of intermediate dense layer(s). The function should return a tf.Tensor. output_nonlinearity (callable):", "each hidden layer in the std network. std_hidden_w_init (callable): Initializer function for the", "network. std_hidden_w_init (callable): Initializer function for the weight of intermediate dense layer(s) in", "class. This class can be used to perform regression by fitting a Gaussian", "for using layer normalization or not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32,", "- softplus: the std will be computed as log(1+exp(x)) layer_normalization (bool): Bool for", "a tf.Tensor. hidden_b_init (callable): Initializer function for the bias of intermediate dense layer(s).", "example, (32, 32) means the MLP consists of two hidden layers, each with", "variable scope of the inner model, if exist. One example is garage.tf.models.Sequential. Return:", "has to be different from source model if cloned under the same computational", "to avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in the", "It copies the configuration and parameters of the primitive. Args: name (str): Name", "shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var _,", "issues. 
std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in the std network. std_hidden_w_init", "to None to maintain a linear activation. std_output_w_init (callable): Initializer function for the", "tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var", "std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name,", "should return a tf.Tensor. output_b_init (callable): Initializer function for the bias of output", "- exp: the logarithm of the std will be stored, and applied a", "\"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This class can be used to perform regression", "a neural network. If False, it will be a parameter. std_share_network (bool): Boolean", "std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): \"\"\"Network output spec. Return: list[str]: List", "weight of intermediate dense layer(s) in the std network. std_hidden_b_init (callable): Initializer function", "function for the weight of output dense layer(s). The function should return a", "value for std. adaptive_std (bool): Is std a neural network. If False, it", "adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape", "a tf.Tensor. Set it to None to maintain a linear activation. output_w_init (callable):", "dimension of dense layer(s) for the MLP for mean. For example, (32, 32)", "shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(),", "return a tf.Tensor. output_nonlinearity (callable): Activation function for output dense layer. It should", "dense layer(s). The function should return a tf.Tensor. hidden_b_init (callable): Initializer function for", "name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var", "new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init,", "a tf.Tensor. Set it to None to maintain a linear activation. std_output_w_init (callable):", "import tensorflow_probability as tfp from garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class", "hidden layers, each with 32 hidden units. 
min_std (float): If not None, the", "for output dense layer in the std network. It should return a tf.Tensor.", "learn_std (bool): Is std trainable. init_std (float): Initial value for std. adaptive_std (bool):", "of two hidden layers, each with 32 hidden units. hidden_nonlinearity (callable): Activation function", "std. For example, (32, 32) means the MLP consists of two hidden layers,", "hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std,", "mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for data. tf.Tensor:", "std trainable. init_std (float): Initial value for std. adaptive_std (bool): Is std a", "whether mean and std share the same network. std_hidden_sizes (list[int]): Output dimension of", "from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This class", "def clone(self, name): \"\"\"Return a clone of the model. It copies the configuration", "avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in the std", "Initializer function for the weight of output dense layer(s) in the std network.", "the model. name (str): Model name, also the variable scope. hidden_sizes (list[int]): Output", "will be stored, and applied a exponential transformation - softplus: the std will", "'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self, state_input, name=None):", "means the MLP consists of two hidden layers, each with 32 hidden units.", "at most the value of max_std, to avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity", "name=None): \"\"\"Build model given input placeholder(s). Args: state_input (tf.Tensor): Place holder for state", "std_output_nonlinearity (callable): Activation function for output dense layer in the std network. It", "for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape,", "hidden units. hidden_nonlinearity (callable): Activation function for intermediate dense layer(s). It should return", "expects [N, *dims], we need to squeeze the extra # dimension normalized_dist_log_std =", "tensorflow as tf import tensorflow_probability as tfp from garage.experiment import deterministic from garage.tf.models", "Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor:", "layer(s) for the MLP for std. For example, (32, 32) means the MLP", "(str): Model name, also the variable scope. hidden_sizes (list[int]): Output dimension of dense", "intermediate dense layer(s). The function should return a tf.Tensor. output_nonlinearity (callable): Activation function", "output_nonlinearity (callable): Activation function for output dense layer. It should return a tf.Tensor.", "for the weight of intermediate dense layer(s) in the std network. std_hidden_b_init (callable):", "same network. std_hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP for", "to None to maintain a linear activation. 
output_w_init (callable): Initializer function for the", "tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var',", "vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a clone", "*dims], we need to squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1)", "spec. Return: list[str]: List of key(str) for the network outputs. \"\"\" return [", "= (state_input - x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) #", "input_shape def network_output_spec(self): \"\"\"Network output spec. Return: list[str]: List of key(str) for the", "the weight of output dense layer(s) in the std network. std_parameterization (str): How", "normalized_xs_var = (state_input - x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var)", "configuration and parameters of the primitive. Args: name (str): Name of the newly", "std is at most the value of max_std, to avoid numerical issues. std_hidden_nonlinearity", "scope of the inner model, if exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag:", "as np import tensorflow as tf import tensorflow_probability as tfp from garage.experiment import", "tf.Tensor. learn_std (bool): Is std trainable. init_std (float): Initial value for std. adaptive_std", "learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None,", "32) means the MLP consists of two hidden layers, each with 32 hidden", "'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self, state_input, name=None): \"\"\"Build model", "model name, also the variable scope of the inner model, if exist. One", "bias of intermediate dense layer(s) in the std network. std_output_nonlinearity (callable): Activation function", "a linear activation. std_output_w_init (callable): Initializer function for the weight of output dense", "\"\"\"Return a clone of the model. It copies the configuration and parameters of", "state_input, name=None): \"\"\"Build model given input placeholder(s). Args: state_input (tf.Tensor): Place holder for", "import numpy as np import tensorflow as tf import tensorflow_probability as tfp from", "scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def", "of intermediate dense layer(s) in the std network. std_hidden_b_init (callable): Initializer function for", "fitting a Gaussian distribution to the outputs. Args: input_shape (tuple[int]): Input shape of", "Output dimension of dense layer(s) for the MLP for std. For example, (32,", "label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32,", "output_w_init (callable): Initializer function for the weight of output dense layer(s). 
The function", "self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32,", "\"\"\"GaussianMLPRegressorModel.\"\"\" import numpy as np import tensorflow as tf import tensorflow_probability as tfp", "initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var =", "need to squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'):", "parameter. std_share_network (bool): Boolean for whether mean and std share the same network.", "(int): Output dimension of the model. name (str): Model name, also the variable", "of max_std, to avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer", "std should be parametrized. There are two options: - exp: the logarithm of", "x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var =", "outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean',", "= normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var)", "tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist,", "to squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var", "dense layer(s). The function should return a tf.Tensor. output_b_init (callable): Initializer function for", "Args: name (str): Name of the newly created model. It has to be", "Mean for label. tf.Tensor: log_std for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable(", "layer_normalization (bool): Bool for using layer normalization or not. \"\"\" def __init__(self, input_shape,", "can be used to perform regression by fitting a Gaussian distribution to the", "source model if cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly cloned", "model. It has to be different from source model if cloned under the", "by fitting a Gaussian distribution to the outputs. Args: input_shape (tuple[int]): Input shape", "min_std (float): If not None, the std is at least the value of", "of key(str) for the network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist',", "for mean. For example, (32, 32) means the MLP consists of two hidden", "options: - exp: the logarithm of the std will be stored, and applied", "dense layer(s). The function should return a tf.Tensor. output_nonlinearity (callable): Activation function for", "shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, )", "(float): If not None, the std is at least the value of min_std,", "distribution. tf.Tensor: Vanilla mean. 
tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor: log_std", "log(1+exp(x)) layer_normalization (bool): Bool for using layer normalization or not. \"\"\" def __init__(self,", "shape of the training data. output_dim (int): Output dimension of the model. name", "be computed as log(1+exp(x)) layer_normalization (bool): Bool for using layer normalization or not.", "normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor expects [N, *dims], we need", "normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean,", "(tuple[int]): Input shape of the training data. output_dim (int): Output dimension of the", "the weight of output dense layer(s). The function should return a tf.Tensor. output_b_init", "Set it to None to maintain a linear activation. std_output_w_init (callable): Initializer function", "+ tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return", "return a tf.Tensor. learn_std (bool): Is std trainable. init_std (float): Initial value for", "32 hidden units. min_std (float): If not None, the std is at least", "of the model. It copies the configuration and parameters of the primitive. Args:", "garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model", "layer(s) for the MLP for mean. For example, (32, 32) means the MLP", "] def _build(self, state_input, name=None): \"\"\"Build model given input placeholder(s). Args: state_input (tf.Tensor):", "loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var,", "None to maintain a linear activation. hidden_w_init (callable): Initializer function for the weight", "tf.Tensor. hidden_b_init (callable): Initializer function for the bias of intermediate dense layer(s). The", "of intermediate dense layer(s) in the std network. std_output_nonlinearity (callable): Activation function for", "data. tf.Tensor: log_std for data. tf.Tensor: Mean for label. tf.Tensor: log_std for label.", "(float): Initial value for std. adaptive_std (bool): Is std a neural network. If", "scope. hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP for mean.", "weight of output dense layer(s). The function should return a tf.Tensor. output_b_init (callable):", "= normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var,", "trainable. init_std (float): Initial value for std. adaptive_std (bool): Is std a neural", "'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self, state_input,", "hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP for mean. 
For", "def _build(self, state_input, name=None): \"\"\"Build model given input placeholder(s). Args: state_input (tf.Tensor): Place", "layers, each with 32 hidden units. hidden_nonlinearity (callable): Activation function for intermediate dense", "of output dense layer(s). The function should return a tf.Tensor. output_b_init (callable): Initializer", "(callable): Initializer function for the bias of intermediate dense layer(s). The function should", "a Gaussian distribution to the outputs. Args: input_shape (tuple[int]): Input shape of the", "tf.Tensor. output_nonlinearity (callable): Activation function for output dense layer. It should return a", "[N, *dims], we need to squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std,", "input_shape (tuple[int]): Input shape of the training data. output_dim (int): Output dimension of", "model. name (str): Model name, also the variable scope. hidden_sizes (list[int]): Output dimension", "the std network. It should return a tf.Tensor. Set it to None to", "output dense layer in the std network. It should return a tf.Tensor. Set", "(callable): Activation function for output dense layer. It should return a tf.Tensor. Set", "<filename>garaged/src/garage/tf/regressors/gaussian_mlp_regressor_model.py \"\"\"GaussianMLPRegressorModel.\"\"\" import numpy as np import tensorflow as tf import tensorflow_probability as", "dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var +", "value of max_std, to avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for each hidden", "parameters of the primitive. Args: name (str): Name of the newly created model.", "output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std,", "= tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var)", "Nonlinearity for each hidden layer in the std network. std_hidden_w_init (callable): Initializer function", "(bool): Is std a neural network. If False, it will be a parameter.", "= super()._build( normalized_xs_var) # Since regressor expects [N, *dims], we need to squeeze", "variable scope. 
hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP for", "initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std =", "hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init,", "# dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var", "means_var = normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std +", "a tf.Tensor. output_nonlinearity (callable): Activation function for output dense layer. It should return", "x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor expects", "the MLP consists of two hidden layers, each with 32 hidden units. min_std", "different from source model if cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel:", "of output dense layer(s) in the std network. std_parameterization (str): How the std", "each with 32 hidden units. hidden_nonlinearity (callable): Activation function for intermediate dense layer(s).", "created model. It has to be different from source model if cloned under", "normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor expects [N, *dims], we need to", "of min_std, to avoid numerical issues. max_std (float): If not None, the std", "std network. It should return a tf.Tensor. Set it to None to maintain", "(bool): Boolean for whether mean and std share the same network. std_hidden_sizes (list[int]):", "at least the value of min_std, to avoid numerical issues. max_std (float): If", "numerical issues. max_std (float): If not None, the std is at most the", "tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor:", "self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False)", "tf.Tensor: Mean for data. tf.Tensor: log_std for data. tf.Tensor: Mean for label. tf.Tensor:", "\"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None,", "Newly cloned model. \"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init,", "std is at least the value of min_std, to avoid numerical issues. max_std", "for the bias of intermediate dense layer(s). The function should return a tf.Tensor.", "intermediate dense layer(s). 
The function should return a tf.Tensor. hidden_b_init (callable): Initializer function", "(str): Name of the newly created model. It has to be different from", "hidden layer in the std network. std_hidden_w_init (callable): Initializer function for the weight", "label. tf.Tensor: log_std for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1,", "log_std for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) +", "and std share the same network. std_hidden_sizes (list[int]): Output dimension of dense layer(s)", "(callable): Activation function for output dense layer in the std network. It should", "y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std))", "name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1,", "std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): \"\"\"Network output spec.", "tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor:", "mean. For example, (32, 32) means the MLP consists of two hidden layers,", "tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist =", "on garage.tf.models.Model class. This class can be used to perform regression by fitting", "it to None to maintain a linear activation. output_w_init (callable): Initializer function for", "tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data.", "of dense layer(s) for the MLP for std. For example, (32, 32) means", "output spec. Return: list[str]: List of key(str) for the network outputs. \"\"\" return", "= tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std,", "for the weight of intermediate dense layer(s). The function should return a tf.Tensor.", "tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) normalized_xs_var = (state_input - x_mean_var) /", "dense layer(s). It should return a tf.Tensor. Set it to None to maintain", "state input. name (str): Inner model name, also the variable scope of the", "the network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean',", "\"\"\"Network output spec. Return: list[str]: List of key(str) for the network outputs. 
\"\"\"", "scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var)) return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var,", "intermediate dense layer(s) in the std network. std_hidden_b_init (callable): Initializer function for the", "The function should return a tf.Tensor. hidden_b_init (callable): Initializer function for the bias", "hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity,", "the bias of output dense layer(s). The function should return a tf.Tensor. learn_std", "name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network,", "return a tf.Tensor. Set it to None to maintain a linear activation. std_output_w_init", "output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(),", "Initializer function for the bias of intermediate dense layer(s) in the std network.", "used to perform regression by fitting a Gaussian distribution to the outputs. Args:", "the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean", "given input placeholder(s). Args: state_input (tf.Tensor): Place holder for state input. name (str):", "'x_std', 'y_mean', 'y_std' ] def _build(self, state_input, name=None): \"\"\"Build model given input placeholder(s).", "None, the std is at most the value of max_std, to avoid numerical", "Activation function for output dense layer. It should return a tf.Tensor. Set it", "hidden layers, each with 32 hidden units. hidden_nonlinearity (callable): Activation function for intermediate", "normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a", "std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in the std network. std_hidden_w_init (callable):", "Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean", "means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a clone of", "layer normalization or not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh,", "Return: list[str]: List of key(str) for the network outputs. \"\"\" return [ 'normalized_dist',", "dimension of the model. 
name (str): Model name, also the variable scope. hidden_sizes", "32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes,", "def network_output_spec(self): \"\"\"Network output spec. Return: list[str]: List of key(str) for the network", "issues. max_std (float): If not None, the std is at most the value", "None, the std is at least the value of min_std, to avoid numerical", "not None, the std is at least the value of min_std, to avoid", "based on garage.tf.models.Model class. This class can be used to perform regression by", "linear activation. hidden_w_init (callable): Initializer function for the weight of intermediate dense layer(s).", "'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self, state_input, name=None): \"\"\"Build", "avoid numerical issues. max_std (float): If not None, the std is at most", "output dense layer(s) in the std network. std_parameterization (str): How the std should", "min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters = self.parameters", "be stored, and applied a exponential transformation - softplus: the std will be", "function should return a tf.Tensor. hidden_b_init (callable): Initializer function for the bias of", "dense layer. It should return a tf.Tensor. Set it to None to maintain", "squeeze the extra # dimension normalized_dist_log_std = tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var =", "Boolean for whether mean and std share the same network. std_hidden_sizes (list[int]): Output", "activation. output_w_init (callable): Initializer function for the weight of output dense layer(s). The", "model given input placeholder(s). Args: state_input (tf.Tensor): Place holder for state input. name", "name (str): Inner model name, also the variable scope of the inner model,", "inner model, if exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor:", "return (normalized_dist, normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self,", "the bias of intermediate dense layer(s). The function should return a tf.Tensor. output_nonlinearity", "'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self, state_input, name=None): \"\"\"Build model given", "exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor:", "MLP for std. For example, (32, 32) means the MLP consists of two", "the value of min_std, to avoid numerical issues. max_std (float): If not None,", "cloned model. 
\"\"\" new_regressor = self.__class__( name=name, input_shape=self._input_shape, output_dim=self._output_dim, hidden_sizes=self._hidden_sizes, hidden_nonlinearity=self._hidden_nonlinearity, hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init,", "tfp from garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based", "outputs. Args: input_shape (tuple[int]): Input shape of the training data. output_dim (int): Output", "The function should return a tf.Tensor. output_nonlinearity (callable): Activation function for output dense", "the std should be parametrized. There are two options: - exp: the logarithm", "tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor:", "tf.Tensor. output_b_init (callable): Initializer function for the bias of output dense layer(s). The", "init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp',", "activation. std_output_w_init (callable): Initializer function for the weight of output dense layer(s) in", "model, if exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized", "dense layer(s) for the MLP for mean. For example, (32, 32) means the", "using layer normalization or not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32),", "Bool for using layer normalization or not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel',", "the std network. std_hidden_b_init (callable): Initializer function for the bias of intermediate dense", "for each hidden layer in the std network. std_hidden_w_init (callable): Initializer function for", "weight of intermediate dense layer(s). The function should return a tf.Tensor. hidden_b_init (callable):", "learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization,", "\"\"\"Build model given input placeholder(s). Args: state_input (tf.Tensor): Place holder for state input.", "log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return a clone of the", "with 32 hidden units. hidden_nonlinearity (callable): Activation function for intermediate dense layer(s). It", "layers, each with 32 hidden units. min_std (float): If not None, the std", "For example, (32, 32) means the MLP consists of two hidden layers, each", "the inner model, if exist. One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution.", "(list[int]): Output dimension of dense layer(s) for the MLP for mean. 
For example,", "log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for data. tf.Tensor: Mean for label.", "std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()),", "shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim),", "return a tf.Tensor. hidden_b_init (callable): Initializer function for the bias of intermediate dense", "If not None, the std is at most the value of max_std, to", "Vanilla mean. tf.Tensor: Vanilla log_std. tf.Tensor: Mean for data. tf.Tensor: log_std for data.", "holder for state input. name (str): Inner model name, also the variable scope", "network. std_hidden_b_init (callable): Initializer function for the bias of intermediate dense layer(s) in", "return a tf.Tensor. output_b_init (callable): Initializer function for the bias of output dense", "the weight of intermediate dense layer(s) in the std network. std_hidden_b_init (callable): Initializer", "Initializer function for the weight of intermediate dense layer(s). The function should return", "network. std_output_nonlinearity (callable): Activation function for output dense layer in the std network.", "It has to be different from source model if cloned under the same", "return a tf.Tensor. Set it to None to maintain a linear activation. output_w_init", "a clone of the model. It copies the configuration and parameters of the", "dense layer(s) in the std network. std_output_nonlinearity (callable): Activation function for output dense", "hidden_w_init (callable): Initializer function for the weight of intermediate dense layer(s). The function", "Inner model name, also the variable scope of the inner model, if exist.", "linear activation. std_output_w_init (callable): Initializer function for the weight of output dense layer(s)", "name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim), dtype=np.float32,", "exp: the logarithm of the std will be stored, and applied a exponential", "x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor expects [N, *dims],", "for whether mean and std share the same network. std_hidden_sizes (list[int]): Output dimension", "for data. tf.Tensor: Mean for label. tf.Tensor: log_std for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'):", "for the MLP for std. For example, (32, 32) means the MLP consists", "output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh,", "'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self, state_input, name=None): \"\"\"Build model given input", "The function should return a tf.Tensor. 
output_b_init (callable): Initializer function for the bias", "transformation - softplus: the std will be computed as log(1+exp(x)) layer_normalization (bool): Bool", "output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True,", "layer(s). The function should return a tf.Tensor. output_b_init (callable): Initializer function for the", "of two hidden layers, each with 32 hidden units. min_std (float): If not", "std network. std_hidden_b_init (callable): Initializer function for the bias of intermediate dense layer(s)", "Output dimension of dense layer(s) for the MLP for mean. For example, (32,", "_build(self, state_input, name=None): \"\"\"Build model given input placeholder(s). Args: state_input (tf.Tensor): Place holder", "for the weight of output dense layer(s). The function should return a tf.Tensor.", "layer(s) in the std network. std_parameterization (str): How the std should be parametrized.", "normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist", "output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity,", "layer_normalization=layer_normalization) self._input_shape = input_shape def network_output_spec(self): \"\"\"Network output spec. Return: list[str]: List of", "'y_mean', 'y_std' ] def _build(self, state_input, name=None): \"\"\"Build model given input placeholder(s). Args:", "adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization)", "hidden_w_init=self._hidden_w_init, hidden_b_init=self._hidden_b_init, output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity,", "layer in the std network. It should return a tf.Tensor. Set it to", "the logarithm of the std will be stored, and applied a exponential transformation", "regression by fitting a Gaussian distribution to the outputs. Args: input_shape (tuple[int]): Input", "(callable): Initializer function for the bias of output dense layer(s). The function should", "How the std should be parametrized. 
There are two options: - exp: the", "units. min_std (float): If not None, the std is at least the value", "logarithm of the std will be stored, and applied a exponential transformation -", "(32, 32) means the MLP consists of two hidden layers, each with 32", "as tfp from garage.experiment import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor", "(state_input - x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) # Since", "std network. std_output_nonlinearity (callable): Activation function for output dense layer in the std", "with tf.name_scope('std_network'): log_stds_var = normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist", "the primitive. Args: name (str): Name of the newly created model. It has", "__init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()),", "from source model if cloned under the same computational graph. Returns: garage.tf.policies.GaussianMLPModel: Newly", "max_std (float): If not None, the std is at most the value of", "normalized_dist_log_std + tf.math.log(y_std_var) normalized_dist = tfp.distributions.MultivariateNormalDiag( loc=normalized_dist_mean, scale_diag=tf.exp(normalized_dist_log_std)) vanilla_dist = tfp.distributions.MultivariateNormalDiag( loc=means_var, scale_diag=tf.exp(log_stds_var))", "seed=deterministic.get_tf_seed_stream()), std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init,", "will be computed as log(1+exp(x)) layer_normalization (bool): Bool for using layer normalization or", "output dense layer(s). The function should return a tf.Tensor. learn_std (bool): Is std", "(bool): Bool for using layer normalization or not. \"\"\" def __init__(self, input_shape, output_dim,", "std share the same network. std_hidden_sizes (list[int]): Output dimension of dense layer(s) for", "of intermediate dense layer(s). The function should return a tf.Tensor. hidden_b_init (callable): Initializer", "name, also the variable scope. hidden_sizes (list[int]): Output dimension of dense layer(s) for", "for the MLP for mean. For example, (32, 32) means the MLP consists", "with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'): log_stds_var =", "(callable): Initializer function for the weight of intermediate dense layer(s) in the std", "List of key(str) for the network outputs. \"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std',", "(tf.Tensor): Place holder for state input. name (str): Inner model name, also the", "name (str): Model name, also the variable scope. hidden_sizes (list[int]): Output dimension of", "numerical issues. 
std_hidden_nonlinearity (callable): Nonlinearity for each hidden layer in the std network.", "should be parametrized. There are two options: - exp: the logarithm of the", "(str): Inner model name, also the variable scope of the inner model, if", "output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization,", "with 32 hidden units. min_std (float): If not None, the std is at", "\"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(),", "function should return a tf.Tensor. output_nonlinearity (callable): Activation function for output dense layer.", "trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var", "output_nonlinearity=self._output_nonlinearity, output_w_init=self._output_w_init, output_b_init=self._output_b_init, learn_std=self._learn_std, adaptive_std=self._adaptive_std, std_share_network=self._std_share_network, init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init,", "hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None,", "'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std' ] def _build(self,", "(callable): Initializer function for the weight of output dense layer(s) in the std", "trainable=False) y_mean_var = tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable(", "- x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor", "Input shape of the training data. output_dim (int): Output dimension of the model.", "output_b_init (callable): Initializer function for the bias of output dense layer(s). The function", "perform regression by fitting a Gaussian distribution to the outputs. Args: input_shape (tuple[int]):", "function for the weight of intermediate dense layer(s). 
The function should return a", "output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization)", "seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32), std_hidden_nonlinearity=tf.nn.tanh, std_hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()),", "tf.squeeze(normalized_dist_log_std, 1) with tf.name_scope('mean_network'): means_var = normalized_dist_mean * y_std_var + y_mean_var with tf.name_scope('std_network'):", "std_hidden_sizes (list[int]): Output dimension of dense layer(s) for the MLP for std. For", "to maintain a linear activation. hidden_w_init (callable): Initializer function for the weight of", "to the outputs. Args: input_shape (tuple[int]): Input shape of the training data. output_dim", "class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This class can be used to", "std_hidden_b_init=tf.zeros_initializer(), std_output_nonlinearity=None, std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity,", "tf.Tensor. Set it to None to maintain a linear activation. output_w_init (callable): Initializer", "hidden units. min_std (float): If not None, the std is at least the", "name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1,", "learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes, std_hidden_nonlinearity=std_hidden_nonlinearity, std_output_nonlinearity=std_output_nonlinearity, std_parameterization=std_parameterization, layer_normalization=layer_normalization) self._input_shape =", "value of min_std, to avoid numerical issues. max_std (float): If not None, the", "One example is garage.tf.models.Sequential. Return: tfp.distributions.MultivariateNormalDiag: Normlizaed distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized", "(bool): Is std trainable. init_std (float): Initial value for std. adaptive_std (bool): Is", "layer(s) in the std network. std_hidden_b_init (callable): Initializer function for the bias of", "network. It should return a tf.Tensor. Set it to None to maintain a", "dense layer(s) in the std network. std_hidden_b_init (callable): Initializer function for the bias", "distribution to the outputs. Args: input_shape (tuple[int]): Input shape of the training data.", "std network. std_parameterization (str): How the std should be parametrized. 
There are two", "with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False)", "for std. For example, (32, 32) means the MLP consists of two hidden", "super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std,", "/ x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build( normalized_xs_var) # Since regressor expects [N,", "of the primitive. Args: name (str): Name of the newly created model. It", "hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), output_b_init=tf.zeros_initializer(), learn_std=True, adaptive_std=False, std_share_network=False, init_std=1.0, min_std=1e-6, max_std=None, std_hidden_sizes=(32, 32),", "= tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1,", "Is std a neural network. If False, it will be a parameter. std_share_network", "be parametrized. There are two options: - exp: the logarithm of the std", "or not. \"\"\" def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()),", "is at least the value of min_std, to avoid numerical issues. max_std (float):", "distribution. tf.Tensor: Normalized mean. tf.Tensor: Normalized log_std. tfp.distributions.MultivariateNormalDiag: Vanilla distribution. tf.Tensor: Vanilla mean.", "name, also the variable scope of the inner model, if exist. One example", "trainable=False) normalized_xs_var = (state_input - x_mean_var) / x_std_var _, normalized_dist_mean, normalized_dist_log_std = super()._build(", "\"\"\" return [ 'normalized_dist', 'normalized_mean', 'normalized_log_std', 'dist', 'mean', 'log_std', 'x_mean', 'x_std', 'y_mean', 'y_std'", "hidden_nonlinearity (callable): Activation function for intermediate dense layer(s). It should return a tf.Tensor.", "False, it will be a parameter. std_share_network (bool): Boolean for whether mean and", "std_output_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), std_parameterization='exp', layer_normalization=False): super().__init__(output_dim=output_dim, name=name, hidden_sizes=hidden_sizes, hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init,", "std_hidden_w_init (callable): Initializer function for the weight of intermediate dense layer(s) in the", "GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class. This class can be used", "MLP consists of two hidden layers, each with 32 hidden units. hidden_nonlinearity (callable):", "a tf.Tensor. Set it to None to maintain a linear activation. 
hidden_w_init (callable):", "as log(1+exp(x)) layer_normalization (bool): Bool for using layer normalization or not. \"\"\" def", "x_std_var = tf.compat.v1.get_variable( name='x_std_var', shape=(1, ) + self._input_shape, dtype=np.float32, initializer=tf.ones_initializer(), trainable=False) y_mean_var =", "the std will be stored, and applied a exponential transformation - softplus: the", "normalized_dist_mean, normalized_dist_log_std, vanilla_dist, means_var, log_stds_var, x_mean_var, x_std_var, y_mean_var, y_std_var) def clone(self, name): \"\"\"Return", "most the value of max_std, to avoid numerical issues. std_hidden_nonlinearity (callable): Nonlinearity for", "for intermediate dense layer(s). It should return a tf.Tensor. Set it to None", "also the variable scope of the inner model, if exist. One example is", "import deterministic from garage.tf.models import GaussianMLPModel class GaussianMLPRegressorModel(GaussianMLPModel): \"\"\"GaussianMLPRegressor based on garage.tf.models.Model class.", "None to maintain a linear activation. output_w_init (callable): Initializer function for the weight", "function for intermediate dense layer(s). It should return a tf.Tensor. Set it to", "std network. std_hidden_w_init (callable): Initializer function for the weight of intermediate dense layer(s)", "tf import tensorflow_probability as tfp from garage.experiment import deterministic from garage.tf.models import GaussianMLPModel", "(callable): Nonlinearity for each hidden layer in the std network. std_hidden_w_init (callable): Initializer", "exponential transformation - softplus: the std will be computed as log(1+exp(x)) layer_normalization (bool):", "the std network. std_parameterization (str): How the std should be parametrized. There are", "hidden_nonlinearity=hidden_nonlinearity, hidden_w_init=hidden_w_init, hidden_b_init=hidden_b_init, output_nonlinearity=output_nonlinearity, output_w_init=output_w_init, output_b_init=output_b_init, learn_std=learn_std, adaptive_std=adaptive_std, std_share_network=std_share_network, init_std=init_std, min_std=min_std, max_std=max_std, std_hidden_sizes=std_hidden_sizes,", "in the std network. std_hidden_b_init (callable): Initializer function for the bias of intermediate", "the training data. output_dim (int): Output dimension of the model. name (str): Model", "tf.compat.v1.get_variable( name='y_mean_var', shape=(1, self._output_dim), dtype=np.float32, initializer=tf.zeros_initializer(), trainable=False) y_std_var = tf.compat.v1.get_variable( name='y_std_var', shape=(1, self._output_dim),", "Set it to None to maintain a linear activation. output_w_init (callable): Initializer function", "also the variable scope. hidden_sizes (list[int]): Output dimension of dense layer(s) for the", "y_std_var) def clone(self, name): \"\"\"Return a clone of the model. It copies the", "init_std=self._init_std, min_std=self._min_std, max_std=self._max_std, std_hidden_sizes=self._std_hidden_sizes, std_hidden_nonlinearity=self._std_hidden_nonlinearity, std_hidden_w_init=self._std_hidden_w_init, std_hidden_b_init=self._std_hidden_b_init, std_output_nonlinearity=self._std_output_nonlinearity, std_output_w_init=self._std_output_w_init, std_parameterization=self._std_parameterization, layer_normalization=self._layer_normalization) new_regressor.parameters =", "tf.Tensor. Set it to None to maintain a linear activation. 
hidden_w_init (callable): Initializer", "def __init__(self, input_shape, output_dim, name='GaussianMLPRegressorModel', hidden_sizes=(32, 32), hidden_nonlinearity=tf.nn.tanh, hidden_w_init=tf.initializers.glorot_uniform( seed=deterministic.get_tf_seed_stream()), hidden_b_init=tf.zeros_initializer(), output_nonlinearity=None, output_w_init=tf.initializers.glorot_uniform(", "be a parameter. std_share_network (bool): Boolean for whether mean and std share the", "tf.Tensor: log_std for label. \"\"\" with tf.compat.v1.variable_scope('normalized_vars'): x_mean_var = tf.compat.v1.get_variable( name='x_mean', shape=(1, )" ]
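The two std_parameterization options described in the docstring above differ only in how an unconstrained network output is mapped to a positive standard deviation. A minimal numpy sketch, illustrative only and not part of the original file (the variable x stands for the raw network output):

import numpy as np

x = np.array([-2.0, 0.0, 2.0])

std_exp = np.exp(x)                 # 'exp': the network output is log(std)
std_softplus = np.log1p(np.exp(x))  # 'softplus': std = log(1 + exp(x))

print(std_exp)       # [0.135..., 1.0, 7.389...]
print(std_softplus)  # [0.126..., 0.693..., 2.126...]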
[ "%.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test()", "dataset') parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args() # Data print('==> Preparing", "loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs = loss else: loss_value", "\"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net)", "targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple):", "[] for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def", "as cudnn import torchvision import torchvision.transforms as transforms import torchvision.datasets as datasets import", "import os import argparse from torch.autograd import Variable from extensions.utils import progress_bar from", "num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir)", "= criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs = loss else: loss_value =", "nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for", "as nn import torch.optim as optim import torch.nn.functional as F import torch.backends.cudnn as", "\"\"\"Computes the precision@k for the specified values of k\"\"\" with torch.no_grad(): maxk =", "progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper", "criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs = loss else: loss_value = loss", "imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input model') args =", "cudnn import torchvision import torchvision.transforms as transforms import torchvision.datasets as datasets import os", "(inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad():", "argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input model') args", "pred = output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred))", "pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in topk: correct_k", "os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([", "= ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count()", "output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res =", "nn import torch.optim as optim import torch.nn.functional as F import torch.backends.cudnn as cudnn", "maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True)", "= loss else: loss_value = loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs,", "label_refinery) device = 
torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1:", "the specified values of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size = target.size(0)", "net.eval() criterion.eval() test_loss = 0 correct_1 = 0 correct_5 = 0 total =", "transforms import torchvision.datasets as datasets import os import argparse from torch.autograd import Variable", "correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss = 0 correct_1", "test_loss = 0 correct_1 = 0 correct_5 = 0 total = 0 for", "target.size(0) _, pred = output.topk(maxk, 1, True, True) pred = pred.t() correct =", "correct_1 = 0 correct_5 = 0 total = 0 for batch_idx, (inputs, targets)", "datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available()", "net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\")", "+= prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total),", "print('==> Preparing data..') # Data loading code valdir = os.path.join(args.datadir, 'val') normalize =", "device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's", "res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss = 0 correct_1 = 0", "= checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available()", "(%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy: {0:.3f}%, top-5", "torch.nn.functional as F import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as transforms", "res = [] for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return", "std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset", "valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test", "torchvision.transforms as transforms import torchvision.datasets as datasets import os import argparse from torch.autograd", "Data print('==> Preparing data..') # Data loading code valdir = os.path.join(args.datadir, 'val') normalize", "(test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy: {0:.3f}%, top-5 accuracy: {1:.3f}%'.format(acc1,acc5))", "correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1),", "with torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value,", "use_cuda = torch.cuda.is_available() print('Using input path: %s' % args.inputdir) 
checkpoint = torch.load(args.inputdir) init_net", "targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets)", "of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _, pred =", "= torch.cuda.is_available() print('Using input path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net =", "net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if", "import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path", "batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with", "#imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30)", "def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the specified values of k\"\"\"", "len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return", "loading code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224,", "criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the specified", "checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device", "prec1, prec5 = accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1 += prec1", "prec5 = accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1 += prec1 correct_5", "for the specified values of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size =", "Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss", "in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval()", "accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1 += prec1 correct_5 += prec5", "= output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res", "os import argparse from torch.autograd import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper", "torch.autograd import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss", "-1).expand_as(pred)) res = [] for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k)", "= target.size(0) _, pred = output.topk(maxk, 1, True, True) pred = pred.t() correct", "res def test(): net.eval() criterion.eval() test_loss = 0 correct_1 = 0 correct_5 =", "0 correct_5 = 0 total = 0 for batch_idx, (inputs, targets) in enumerate(testloader):", "transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using", "import progress_bar from 
extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import", "_, pred = output.topk(maxk, 1, True, True) pred = pred.t() correct = pred.eq(target.view(1,", "total += targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f", "= accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1 += prec1 correct_5 +=", "topk=(1, 5)) total += targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader),", "the precision@k for the specified values of k\"\"\" with torch.no_grad(): maxk = max(topk)", "]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True,", "import torch.nn.functional as F import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as", "= torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s'", "torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s' %", "print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device)", "specified values of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _,", "criterion.eval() test_loss = 0 correct_1 = 0 correct_5 = 0 total = 0", "0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet", "test(): net.eval() criterion.eval() test_loss = 0 correct_1 = 0 correct_5 = 0 total", "progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total))", "ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() >", "targets) in enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs", "models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir',", "= loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1, 5)) total", "pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s' % args.inputdir) checkpoint =", "# Data loading code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],", "batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True) pred = pred.t()", "inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets) if isinstance(loss,", "prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' %", "net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the", "if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss", "0 
total = 0 for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs,", "import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import", "Data loading code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229,", "checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else", "\"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes", "values of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _, pred", "outputs = net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs =", "+= prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)'", "ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper", "= nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k", "RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the specified values of", "= pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in topk: correct_k = correct[:k].view(-1).float().sum(0,", "transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader =", "import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as transforms import torchvision.datasets as", "DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to", "torch.optim as optim import torch.nn.functional as F import torch.backends.cudnn as cudnn import torchvision", "if isinstance(loss, tuple): loss_value, outputs = loss else: loss_value = loss test_loss +=", "True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k", "import torchvision.transforms as transforms import torchvision.datasets as datasets import os import argparse from", "target, topk=(1,)): \"\"\"Computes the precision@k for the specified values of k\"\"\" with torch.no_grad():", "<filename>test.py import torch import torch.nn as nn import torch.optim as optim import torch.nn.functional", "optim import torch.nn.functional as F import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms", "= 0 total = 0 for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda:", "= 0 for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets =", "inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args()", "= datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda =", "inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss = 
criterion(outputs,", "extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser =", "import torchvision.datasets as datasets import os import argparse from torch.autograd import Variable from", "label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device)", "else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net =", "F import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as transforms import torchvision.datasets", "input path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7')", "argparse from torch.autograd import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper", "print('Using input path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu')", "0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir,", "= inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets) if", "to dataset') parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args() # Data print('==>", "transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize,", "init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\" if", "for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test():", "help='path to dataset') parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args() # Data", "if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\")", "for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device)", "to input model') args = parser.parse_args() # Data print('==> Preparing data..') # Data", "prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1,", "= transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000", "help='path to input model') args = parser.parse_args() # Data print('==> Preparing data..') #", "= parser.parse_args() # Data print('==> Preparing data..') # Data loading code valdir =", "path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net", "from torch.autograd import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from", "test_loss += loss_value.item() prec1, prec5 = 
accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0)", "data..') # Data loading code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456,", "num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input", "loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1 +=", "topk=(1,)): \"\"\"Computes the precision@k for the specified values of k\"\"\" with torch.no_grad(): maxk", "extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from models", "= RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the specified values", "print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output,", "import argparse from torch.autograd import Variable from extensions.utils import progress_bar from extensions.model_refinery_wrapper import", "torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as transforms import torchvision.datasets as datasets", "from extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from", "= argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input model')", "+= targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f |", "0 correct_1 = 0 correct_5 = 0 total = 0 for batch_idx, (inputs,", "extensions.utils import progress_bar from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models", "torch import torch.nn as nn import torch.optim as optim import torch.nn.functional as F", "batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s' % args.inputdir)", "targets, topk=(1, 5)) total += targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx,", "> 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss()", "%.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy: {0:.3f}%,", "testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path:", "= transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(),", "import torch.optim as optim import torch.nn.functional as F import torch.backends.cudnn as cudnn import", "import torch.nn as nn import torch.optim as optim import torch.nn.functional as F import", "loss_value = loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1, 5))", "with torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1,", 
"net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the", "import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from models import", "RefineryLoss from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet", "'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256),", "return res def test(): net.eval() criterion.eval() test_loss = 0 correct_1 = 0 correct_5", "% (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy: {0:.3f}%, top-5 accuracy:", "parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input", "= os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test =", "torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(),", "torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target, topk=(1,)):", "True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for", "= correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss = 0", "tuple): loss_value, outputs = loss else: loss_value = loss test_loss += loss_value.item() prec1,", "5)) total += targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss:", "0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset =", "transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test)", "pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in", "total = 0 for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets", "%s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net =", "loss else: loss_value = loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs, targets,", "use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def accuracy(output, target,", "% args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net,", "shuffle=False, pin_memory=True, num_workers=30) use_cuda = torch.cuda.is_available() print('Using input path: %s' % args.inputdir) checkpoint", "'Loss: %.3f | Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss", "# Data 
print('==> Preparing data..') # Data loading code valdir = os.path.join(args.datadir, 'val')", "transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset,", "code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])", "1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion = RefineryLoss() def", "= [] for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res", "import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path", "args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery)", "import torch import torch.nn as nn import torch.optim as optim import torch.nn.functional as", "from extensions.model_refinery_wrapper import ModelRefineryWrapper from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from", "parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args() # Data print('==> Preparing data..')", "transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader", "if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion", "precision@k for the specified values of k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size", "enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs)", "torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net", "model') args = parser.parse_args() # Data print('==> Preparing data..') # Data loading code", "max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True) pred =", "net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs = loss else:", "outputs = loss else: loss_value = loss test_loss += loss_value.item() prec1, prec5 =", "Preparing data..') # Data loading code valdir = os.path.join(args.datadir, 'val') normalize = transforms.Normalize(mean=[0.485,", "k\"\"\" with torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk,", "datasets import os import argparse from torch.autograd import Variable from extensions.utils import progress_bar", "isinstance(loss, tuple): loss_value, outputs = loss else: loss_value = loss test_loss += loss_value.item()", "correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc: %.3f%%", "| Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1", "torch.no_grad(): outputs = net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value, 
outputs", "= pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in topk:", "= net(inputs) loss = criterion(outputs, targets) if isinstance(loss, tuple): loss_value, outputs = loss", "keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss = 0 correct_1 =", "from extensions.refinery_loss import RefineryLoss from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser", "torchvision.datasets as datasets import os import argparse from torch.autograd import Variable from extensions.utils", "as optim import torch.nn.functional as F import torch.backends.cudnn as cudnn import torchvision import", "testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False, pin_memory=True, num_workers=30) use_cuda", "import RefineryLoss from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch", "k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval()", "as datasets import os import argparse from torch.autograd import Variable from extensions.utils import", "args = parser.parse_args() # Data print('==> Preparing data..') # Data loading code valdir", "correct_5 = 0 total = 0 for batch_idx, (inputs, targets) in enumerate(testloader): if", "topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss", "from models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference')", "ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to", "torchvision import torchvision.transforms as transforms import torchvision.datasets as datasets import os import argparse", "torch.cuda.is_available() print('Using input path: %s' % args.inputdir) checkpoint = torch.load(args.inputdir) init_net = checkpoint['net']", "accuracy(output, target, topk=(1,)): \"\"\"Computes the precision@k for the specified values of k\"\"\" with", "transforms.ToTensor(), normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000,", "correct = pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in topk: correct_k =", "1, True, True) pred = pred.t() correct = pred.eq(target.view(1, -1).expand_as(pred)) res = []", "normalize, ]) #imagenet testset = datasets.ImageFolder(valdir, transform_test) num_classes=1000 testloader = torch.utils.data.DataLoader(testset, batch_size=1000, shuffle=False,", "def test(): net.eval() criterion.eval() test_loss = 0 correct_1 = 0 correct_5 = 0", "+= loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1, 5)) total += targets.size(0) correct_1", "import torchvision import torchvision.transforms as transforms import torchvision.datasets as datasets import os import", "parser.parse_args() # Data print('==> Preparing data..') # Data loading code valdir = os.path.join(args.datadir,", "= max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True, True) pred", "loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1, 5)) total +=", "= 0 
correct_1 = 0 correct_5 = 0 total = 0 for batch_idx,", "targets.size(0) correct_1 += prec1 correct_5 += prec5 progress_bar(batch_idx, len(testloader), 'Loss: %.3f | Acc:", "torch.no_grad(): maxk = max(topk) batch_size = target.size(0) _, pred = output.topk(maxk, 1, True,", "torch.nn as nn import torch.optim as optim import torch.nn.functional as F import torch.backends.cudnn", "in enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs =", "use_cuda: inputs, targets = inputs.cuda(device), targets.cuda(device) with torch.no_grad(): outputs = net(inputs) loss =", "normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224),", "parser.add_argument('--datadir', help='path to dataset') parser.add_argument('--inputdir', help='path to input model') args = parser.parse_args() #", "= torch.device(\"cuda:0\" if torch.cuda.is_available() else \"cpu\") print(device) if torch.cuda.device_count() > 1: print(\"Let's use\",", "as F import torch.backends.cudnn as cudnn import torchvision import torchvision.transforms as transforms import", "input model') args = parser.parse_args() # Data print('==> Preparing data..') # Data loading", "torch.cuda.device_count() > 1: print(\"Let's use\", torch.cuda.device_count(), \"GPUs!\") net = nn.DataParallel(net) net=net.to(device) criterion =", "torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device = torch.device(\"cuda:0\"", "correct_k = correct[:k].view(-1).float().sum(0, keepdim=True) res.append(correct_k) return res def test(): net.eval() criterion.eval() test_loss =", "targets) if isinstance(loss, tuple): loss_value, outputs = loss else: loss_value = loss test_loss", "models import ShuffleNetv2_wrapper from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir',", "pred.eq(target.view(1, -1).expand_as(pred)) res = [] for k in topk: correct_k = correct[:k].view(-1).float().sum(0, keepdim=True)", "0 for batch_idx, (inputs, targets) in enumerate(testloader): if use_cuda: inputs, targets = inputs.cuda(device),", "as transforms import torchvision.datasets as datasets import os import argparse from torch.autograd import", "= torch.load(args.inputdir) init_net = checkpoint['net'] net=init_net.to('cpu') label_refinery=torch.load('./resnet50.t7') net = ModelRefineryWrapper(net, label_refinery) device =", "from models import DiracDeltaNet_wrapper parser = argparse.ArgumentParser(description='PyTorch imagenet inference') parser.add_argument('--datadir', help='path to dataset')", "0.456, 0.406], std=[0.229, 0.224, 0.225]) transform_test = transforms.Compose([ transforms.Resize(256), transforms.CenterCrop(224), transforms.ToTensor(), normalize, ])", "Acc: %.3f%% (%d/%d)' % (test_loss/(batch_idx+1), 100.*float(correct_1)/float(total), correct_1, total)) return 100.*float(correct_1)/float(total),100.*float(correct_5)/float(total),test_loss acc1,acc5,loss=test() print('top-1 accuracy:", "else: loss_value = loss test_loss += loss_value.item() prec1, prec5 = accuracy(outputs, targets, topk=(1,", "loss_value, outputs = loss else: loss_value = loss test_loss += loss_value.item() prec1, prec5", "= 0 correct_5 = 0 total = 0 for batch_idx, (inputs, targets) in" ]
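A small usage sketch for the accuracy() helper defined in the script above, assuming that function is in scope; the random logits and labels below are hypothetical placeholders, not real ImageNet data:

import torch

logits = torch.randn(8, 1000)          # hypothetical batch of 8 samples, 1000 classes
labels = torch.randint(0, 1000, (8,))  # hypothetical ground-truth labels

# accuracy() returns the number of correct predictions per requested k
top1, top5 = accuracy(logits, labels, topk=(1, 5))
print(100. * float(top1) / labels.size(0), 100. * float(top5) / labels.size(0))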
[ "} OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam, 'adamax': optim.Adamax, 'rmsprop':", "'greedy': GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam':", "GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam,", "optim from .search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy':", "'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad':", "{ 'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta,", "= { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam, 'adamax': optim.Adamax, 'rmsprop': optim.RMSprop, 'sgd':", "SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch, }", "BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam, 'adamax': optim.Adamax,", "<filename>paccmann_chemistry/utils/hyperparams.py \"\"\"Model Parameters Module.\"\"\" import torch.optim as optim from .search import SamplingSearch, GreedySearch,", "{ 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam, 'adamax': optim.Adamax, 'rmsprop': optim.RMSprop, 'sgd': optim.SGD", "Module.\"\"\" import torch.optim as optim from .search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY =", "as optim from .search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch,", "OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam, 'adamax': optim.Adamax, 'rmsprop': optim.RMSprop,", "'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam, 'adamax': optim.Adamax, 'rmsprop': optim.RMSprop, 'sgd': optim.SGD }", "'beam': BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad, 'adam': optim.Adam, 'adamax':", "from .search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy': GreedySearch,", "SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY = {", "torch.optim as optim from .search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling':", "import torch.optim as optim from .search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = {", "Parameters Module.\"\"\" import torch.optim as optim from .search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY", "SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta': optim.Adadelta, 'adagrad': optim.Adagrad,", "import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch,", "\"\"\"Model Parameters Module.\"\"\" import torch.optim as optim from .search import SamplingSearch, GreedySearch, BeamSearch", "= { 'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY = { 'adadelta':", ".search import SamplingSearch, GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam':", "BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy': GreedySearch, 
'beam': BeamSearch, } OPTIMIZER_FACTORY =", "GreedySearch, BeamSearch SEARCH_FACTORY = { 'sampling': SamplingSearch, 'greedy': GreedySearch, 'beam': BeamSearch, } OPTIMIZER_FACTORY" ]
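A plausible way the factories above are consumed is to look up a class by the string found in a config and then instantiate it. A sketch under that assumption; the params dict and the linear model are hypothetical stand-ins:

import torch.nn as nn

from paccmann_chemistry.utils.hyperparams import OPTIMIZER_FACTORY

params = {'optimizer': 'adam', 'lr': 1e-3}  # hypothetical config values
model = nn.Linear(16, 4)                    # hypothetical model

# Map the config string to a torch optimizer class and instantiate it
optimizer_cls = OPTIMIZER_FACTORY[params['optimizer']]
optimizer = optimizer_cls(model.parameters(), lr=params['lr'])
print(optimizer)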
[ "return distributions ############ # Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv", "KIND, either express or implied. # See the License for the specific language", "# Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution)", "Unless required by applicable law or agreed to in writing, software # distributed", "gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############", "posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters[\"loc\"] q_sqrt =", "w_dim): num_data, x_dim, y_dim = 43, 3, 1 prior_shape = (w_dim,) posteriors_shape =", "= np.full((num_data, y_dim), np.nan) observations = [inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1) _", "tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions ############ # Tests ############ @pytest.mark.parametrize(\"distribution,", "the ABCMeta to LayerWithObservations we are not accidentally removing some required TensorFlow magic", "= 123 inputs, targets = test_data num_data, x_dim = inputs.shape prior_shape = (w_dim,)", "0 # test kl > 0 when posteriors != priors batch_size = 10", "= 10 params = distribution.parameters posterior_params = { k: [v + 0.5 for", "as np import pytest import tensorflow as tf import tensorflow_probability as tfp from", ") encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data, x_dim),", "distributions ############ # Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv =", "this file except in compliance with the License. # You may obtain a", "posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a subclass of tf.keras.layers.Layer", "42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123 inputs, targets = test_data", "targets = np.full((num_data, y_dim), np.nan) observations = [inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1)", "posteriors != priors batch_size = 10 params = distribution.parameters posterior_params = { k:", "encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1)", "encoder.assert_not_called() assert lv.losses == [0.0] _ = lv(inputs, observations=observations, training=True) # assert_called_once_with uses", "Copyright (c) 2021 The GPflux Contributors. # # Licensed under the Apache License,", "w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors(", "ANY KIND, either express or implied. 
# See the License for the specific", "test_data, w_dim, seed2): seed = 123 inputs, targets = test_data num_data, x_dim =", "inputs.shape prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape)", "43, 3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag(", "fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks", "checks shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim,", "tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs,", "1 prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape)", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See", "= mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets", "kl > 0 when posteriors != priors batch_size = 10 params = distribution.parameters", "also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data,", "tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior \"\"\" return", "and # limitations under the License. 
# import abc import numpy as np", "3), np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu,", "== [0.0] _ = lv(inputs, observations=observations, training=True) # assert_called_once_with uses == for comparison", "lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu", "= np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors", "axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "uses == for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))]", "np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self, expected): self.expected = expected def __eq__(self,", "test kl is 0 when posteriors == priors posterior = distribution assert lv._local_kls(posterior)", "= lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed)", "if isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls >", "OF ANY KIND, either express or implied. # See the License for the", "accidentally removing some required TensorFlow magic metaclass. 
\"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert", "actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim,", "_ = lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0] _ = lv(inputs, observations=observations, training=True)", "np import pytest import tensorflow as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers", "lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs,", "** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior", "TrackableLayer does not have a metaclass, and hence by adding the ABCMeta to", "\"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer) is type assert type(LayerWithObservations) is abc.ABCMeta", "test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123 inputs, targets = test_data num_data, x_dim", "[] for d in [1, 5]: mean = np.zeros(d) scale_tri_l = np.eye(d) mvn", "expected): self.expected = expected def __eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1,", "= lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls > 0) assert local_kls.shape == (batch_size,)", "loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior)", "d)) return distributions ############ # Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim):", "distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions ############ # Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim())", "test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means)", "5]: mean = np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std =", "lv.losses == [0.0] _ = lv(inputs, observations=observations, training=True) # assert_called_once_with uses == for", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "d)) distributions.append((mvn_diag, d)) return distributions ############ # Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def", "seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed)", "def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0 when", "== 0 # test kl > 0 when posteriors != priors batch_size =", "prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = [] for d in", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless 
required by applicable law or agreed to", "[1, 5]: mean = np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std", "[tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None,", "q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10)", "w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape),", "tfp from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer,", "seed = 123 inputs, targets = test_data num_data, x_dim = inputs.shape prior_shape =", "(batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data, w_dim)", "__init__(self, expected): self.expected = expected def __eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\",", "= np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std)", "scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs", "sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1)", "w_dim, seed2): seed = 123 inputs, targets = test_data num_data, x_dim = inputs.shape", "under the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self, expected): self.expected = expected def __eq__(self, actual):", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "ABCMeta to LayerWithObservations we are not accidentally removing some required TensorFlow magic metaclass.", "inputs = np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan) observations = [inputs,", "observations=observations, training=True) # assert_called_once_with uses == for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs),", "ensures that TrackableLayer does not have a metaclass, and hence by adding the", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "required by applicable law or agreed to in writing, software # distributed under", "by adding the ABCMeta to LayerWithObservations we are not accidentally removing some required", "test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a subclass of tf.keras.layers.Layer (via TrackableLayer); this test ensures", "np.nan) targets = np.full((num_data, y_dim), np.nan) observations = [inputs, targets] encoder_inputs = np.concatenate(observations,", "applicable law or agreed to in writing, software # distributed under the License", "means) lv = 
LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] )", "w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0 when posteriors ==", "def __eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim):", "k: [v + 0.5 for _ in range(batch_size)] for k, v in params.items()", "# Copyright (c) 2021 The GPflux Contributors. # # Licensed under the Apache", "or agreed to in writing, software # distributed under the License is distributed", "np.nan) observations = [inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called()", "under the License. # import abc import numpy as np import pytest import", "have a metaclass, and hence by adding the ABCMeta to LayerWithObservations we are", "123 inputs, targets = test_data num_data, x_dim = inputs.shape prior_shape = (w_dim,) posteriors_shape", "import pytest import tensorflow as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers import", "CONDITIONS OF ANY KIND, either express or implied. # See the License for", "tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self, expected): self.expected =", "tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is", "distribution.parameters posterior_params = { k: [v + 0.5 for _ in range(batch_size)] for", "lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls", "test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0 when posteriors", "params.items() if isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls", "tf import tensorflow_probability as tfp from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag", "@pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim = 43, 3, 1", "posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected =", "# limitations under the License. # import abc import numpy as np import", "posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a subclass of tf.keras.layers.Layer (via TrackableLayer); this", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim = 43, 3,", "writing, software # distributed under the License is distributed on an \"AS IS\"", "GPflux Contributors. 
# # Licensed under the Apache License, Version 2.0 (the \"License\");", "w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)]", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "num_data = 400 means = np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv", "License. # You may obtain a copy of the License at # #", "10 params = distribution.parameters posterior_params = { k: [v + 0.5 for _", "\"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = [] for d in [1,", "lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0] _ = lv(inputs, observations=observations, training=True) # assert_called_once_with", "= LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0 when posteriors == priors posterior", "match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed", "means = np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim))", "compliance with the License. # You may obtain a copy of the License", "prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan) observations =", "and hence by adding the ABCMeta to LayerWithObservations we are not accidentally removing", "required TensorFlow magic metaclass. \"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer) is type", "not have a metaclass, and hence by adding the ABCMeta to LayerWithObservations we", "# assert_called_once_with uses == for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss", "tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = [] for d in [1, 5]: mean", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape)", "posteriors == priors posterior = distribution assert lv._local_kls(posterior) == 0 # test kl", "LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)],", "ArrayMatcher: def __init__(self, expected): self.expected = expected def __eq__(self, actual): return np.allclose(actual, self.expected,", "def _zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions", "prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2)", "= 
tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self, expected): self.expected = expected", "[0.0] _ = lv(inputs, observations=observations, training=True) # assert_called_once_with uses == for comparison which", "TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior \"\"\"", "v in params.items() if isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior)", "# # Copyright (c) 2021 The GPflux Contributors. # # Licensed under the", "= LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan)", "mean = np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d)", "not use this file except in compliance with the License. # You may", "= np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag", "TrackableLayer); this test ensures that TrackableLayer does not have a metaclass, and hence", "License, Version 2.0 (the \"License\"); # you may not use this file except", "encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match @pytest.mark.parametrize(\"w_dim\",", "get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0", "local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means =", "############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test", "tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d))", "} posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls > 0) assert local_kls.shape", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "priors posterior = distribution assert lv._local_kls(posterior) == 0 # test kl > 0", "\"\"\" LayerWithObservations is a subclass of tf.keras.layers.Layer (via TrackableLayer); this test ensures that", ") posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag))", "(w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, )", "numpy as np import pytest import tensorflow as tf import tensorflow_probability as tfp", "# you may not use this file except in compliance with the License.", "[inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called() assert lv.losses ==", "specific language governing permissions and # limitations under the License. 
# import abc", "agreed to in writing, software # distributed under the License is distributed on", "= tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag,", "gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self, expected):", "LayerWithObservations is a subclass of tf.keras.layers.Layer (via TrackableLayer); this test ensures that TrackableLayer", "LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan) observations", "3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape),", "assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means", "targets] encoder_inputs = np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0]", "(the \"License\"); # you may not use this file except in compliance with", "seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior =", "np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim =", "5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim = 43, 3, 1 prior_shape =", "= np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions ############", "# Unless required by applicable law or agreed to in writing, software #", "by applicable law or agreed to in writing, software # distributed under the", "0 when posteriors == priors posterior = distribution assert lv._local_kls(posterior) == 0 #", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, LayerWithObservations,", "self.expected = expected def __eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5])", "for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss)", "y_dim), np.nan) observations = [inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1) _ = lv(inputs)", "in [1, 5]: mean = np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l)", "gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def", "file except in compliance with the License. 
# You may obtain a copy", "training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1,", "5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123 inputs,", "2021 The GPflux Contributors. # # Licensed under the Apache License, Version 2.0", "= tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions ############ # Tests ############", "local_kls = lv._local_kls(posterior) assert np.all(local_kls > 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1,", "return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = [] for d in [1, 5]:", "License for the specific language governing permissions and # limitations under the License.", "I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = [] for d", "permissions and # limitations under the License. # import abc import numpy as", "############ def _zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim():", "to in writing, software # distributed under the License is distributed on an", "@pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data, w_dim) encoder", "tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior,", "np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a subclass of tf.keras.layers.Layer (via TrackableLayer);", "seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations", "np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors =", "pytest import tensorflow as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers import gauss_kl", "implied. # See the License for the specific language governing permissions and #", "== (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data,", "\"License\"); # you may not use this file except in compliance with the", "# also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker,", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "when posteriors == priors posterior = distribution assert lv._local_kls(posterior) == 0 # test", "LayerWithObservations we are not accidentally removing some required TensorFlow magic metaclass. 
\"\"\" assert", "metaclass. \"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer) is type assert type(LayerWithObservations) is", "def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim,", "= [inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called() assert lv.losses", "encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2)", "> 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data =", "lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls > 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\",", "mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets =", "axis=-1) _ = lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0] _ = lv(inputs, observations=observations,", "limitations under the License. # import abc import numpy as np import pytest", "get_distributions_with_w_dim(): distributions = [] for d in [1, 5]: mean = np.zeros(d) scale_tri_l", "posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher:", "or implied. # See the License for the specific language governing permissions and", "= np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a subclass", "[np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls =", "kl is 0 when posteriors == priors posterior = distribution assert lv._local_kls(posterior) ==", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "metaclass, and hence by adding the ABCMeta to LayerWithObservations we are not accidentally", "OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License", "may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "in writing, software # distributed under the License is distributed on an \"AS", "from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ #", "= DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3),", "test kl > 0 when posteriors != priors batch_size = 10 params =", "np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag =", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\" N(0, I)", "mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d))", "encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data, x_dim), np.nan)", "some required TensorFlow magic metaclass. \"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer) is", "are not accidentally removing some required TensorFlow magic metaclass. \"\"\" assert LayerWithObservations.__bases__ ==", "_zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions =", "2)] ) q_mu = posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls", "the License. # import abc import numpy as np import pytest import tensorflow", "= gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self,", "np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls > 0) assert", "= 43, 3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior =", "for d in [1, 5]: mean = np.zeros(d) scale_tri_l = np.eye(d) mvn =", "= LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data,", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. 
#", "np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn,", "= [] for d in [1, 5]: mean = np.zeros(d) scale_tri_l = np.eye(d)", "adding the ABCMeta to LayerWithObservations we are not accidentally removing some required TensorFlow", "assert np.all(local_kls > 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim):", "[None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123 inputs, targets =", "_ in range(batch_size)] for k, v in params.items() if isinstance(v, np.ndarray) } posterior", "assert lv.losses == [0.0] _ = lv(inputs, observations=observations, training=True) # assert_called_once_with uses ==", "prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs,", "removing some required TensorFlow magic metaclass. \"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer)", "as tfp from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import", "posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls > 0) assert local_kls.shape ==", "LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0 when posteriors == priors posterior =", "import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\")", "############ # Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim),", "posterior_params = { k: [v + 0.5 for _ in range(batch_size)] for k,", "The GPflux Contributors. # # Licensed under the Apache License, Version 2.0 (the", "use this file except in compliance with the License. 
# You may obtain", "LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior", "= tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) **", "posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets = np.full((num_data,", "= (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors =", "(via TrackableLayer); this test ensures that TrackableLayer does not have a metaclass, and", "= 400 means = np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv =", "in params.items() if isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert", "__eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data,", "scale_tri_l) std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123 inputs, targets = test_data num_data,", "= np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0] _ =", "scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = [] for d in [1, 5]: mean =", "License. # import abc import numpy as np import pytest import tensorflow as", "encoder_inputs = np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0] _", "_ = lv(inputs, observations=observations, training=True) # assert_called_once_with uses == for comparison which fails", ") encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs,", "(c) 2021 The GPflux Contributors. # # Licensed under the Apache License, Version", "targets = test_data num_data, x_dim = inputs.shape prior_shape = (w_dim,) posteriors_shape = (num_data,", "@pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123 inputs, targets", "magic metaclass. 
\"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer) is type assert type(LayerWithObservations)", "np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions ############ #", "posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls,", "lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected)", "test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim = 43, 3, 1 prior_shape = (w_dim,) posteriors_shape", "assert lv._local_kls(posterior) == 0 # test kl > 0 when posteriors != priors", "############ # Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None,", "2.0 (the \"License\"); # you may not use this file except in compliance", ") q_mu = posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls =", "lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim),", "x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan) observations = [inputs, targets] encoder_inputs =", "this test ensures that TrackableLayer does not have a metaclass, and hence by", "in range(batch_size)] for k, v in params.items() if isinstance(v, np.ndarray) } posterior =", "sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected)", "for the specific language governing permissions and # limitations under the License. #", "= expected def __eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def", "a metaclass, and hence by adding the ABCMeta to LayerWithObservations we are not", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets], training=True,", "= (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2,", "to LayerWithObservations we are not accidentally removing some required TensorFlow magic metaclass. \"\"\"", "gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer", "# # Unless required by applicable law or agreed to in writing, software", "TensorFlow magic metaclass. \"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,) assert type(TrackableLayer) is type assert", "express or implied. 
# See the License for the specific language governing permissions", "= posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls,", "= np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets],", "= lv(inputs, observations=observations, training=True) # assert_called_once_with uses == for comparison which fails on", "on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes", "# import abc import numpy as np import pytest import tensorflow as tf", "either express or implied. # See the License for the specific language governing", "0.5 for _ in range(batch_size)] for k, v in params.items() if isinstance(v, np.ndarray)", "** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder =", "does not have a metaclass, and hence by adding the ABCMeta to LayerWithObservations", "np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt)", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "x_dim, y_dim = 43, 3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim)", "num_data, x_dim, y_dim = 43, 3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data,", "= [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\",", "** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs =", "posterior = distribution assert lv._local_kls(posterior) == 0 # test kl > 0 when", "gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities", "400 means = np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder,", "= lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior,", "lv._local_kls(posterior) assert np.all(local_kls > 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def", "mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions ############ # Tests", "q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self, expected): self.expected", "num_data, x_dim = inputs.shape prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior =", "is a subclass 
of tf.keras.layers.Layer (via TrackableLayer); this test ensures that TrackableLayer does", "np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a subclass of", "the License. # You may obtain a copy of the License at #", "DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data,", "lv._local_kls(posterior) == 0 # test kl > 0 when posteriors != priors batch_size", "x_dim = inputs.shape prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag(", "posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match", "for _ in range(batch_size)] for k, v in params.items() if isinstance(v, np.ndarray) }", "distributions = [] for d in [1, 5]: mean = np.zeros(d) scale_tri_l =", "np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0] _ = lv(inputs,", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400", "prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters[\"loc\"] q_sqrt", "(num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag(", "= { k: [v + 0.5 for _ in range(batch_size)] for k, v", "tf.keras.layers.Layer (via TrackableLayer); this test ensures that TrackableLayer does not have a metaclass,", "expected_loss) # also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def", "seed2): seed = 123 inputs, targets = test_data num_data, x_dim = inputs.shape prior_shape", "gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\"", "governing permissions and # limitations under the License. 
# import abc import numpy", "lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior", "observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def", "training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\"", "with the License. # You may obtain a copy of the License at", "@pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl", "= lv(inputs) encoder.assert_not_called() assert lv.losses == [0.0] _ = lv(inputs, observations=observations, training=True) #", "[1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data, w_dim) encoder =", "of tf.keras.layers.Layer (via TrackableLayer); this test ensures that TrackableLayer does not have a", "distribution assert lv._local_kls(posterior) == 0 # test kl > 0 when posteriors !=", "import tensorflow as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers import gauss_kl from", "2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior =", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\" N(0,", "hence by adding the ABCMeta to LayerWithObservations we are not accidentally removing some", "shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2):", "[1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed = 123", "not accidentally removing some required TensorFlow magic metaclass. 
\"\"\" assert LayerWithObservations.__bases__ == (TrackableLayer,)", "tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior,", "= lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"]", "import abc import numpy as np import pytest import tensorflow as tf import", "for k, v in params.items() if isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls", "loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder)", "a subclass of tf.keras.layers.Layer (via TrackableLayer); this test ensures that TrackableLayer does not", "scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l) std = np.ones(d) mvn_diag = tfp.distributions.MultivariateNormalDiag(mean,", "law or agreed to in writing, software # distributed under the License is", "the License for the specific language governing permissions and # limitations under the", "= distribution.parameters posterior_params = { k: [v + 0.5 for _ in range(batch_size)]", "observations = [inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1) _ = lv(inputs) encoder.assert_not_called() assert", "test_data num_data, x_dim = inputs.shape prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior", "= distribution assert lv._local_kls(posterior) == 0 # test kl > 0 when posteriors", "[1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim = 43, 3, 1 prior_shape", "prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) **", "std) distributions.append((mvn, d)) distributions.append((mvn_diag, d)) return distributions ############ # Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\",", "!= priors batch_size = 10 params = distribution.parameters posterior_params = { k: [v", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5])", "= mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed)", "# test kl > 0 when posteriors != priors batch_size = 10 params", "tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)],", "tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class ArrayMatcher: def __init__(self, expected): self.expected = expected def", "[v + 0.5 for _ in range(batch_size)] for k, v in params.items() if", "tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = 
mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder,", "training=True) # assert_called_once_with uses == for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True)", "targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten():", "isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls = lv._local_kls(posterior) assert np.all(local_kls > 0)", "abc import numpy as np import pytest import tensorflow as tf import tensorflow_probability", "in compliance with the License. # You may obtain a copy of the", "mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed) sample_prior = lv(inputs, seed=seed2) tf.random.set_seed(seed) prior_expected", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "params = distribution.parameters posterior_params = { k: [v + 0.5 for _ in", "prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs,", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "q_mu = posteriors.parameters[\"loc\"] q_sqrt = posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors))", "Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "N(0, I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = [] for", "== priors posterior = distribution assert lv._local_kls(posterior) == 0 # test kl >", "that TrackableLayer does not have a metaclass, and hence by adding the ABCMeta", "tensorflow_probability as tfp from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from gpflux.layers", "axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a subclass of tf.keras.layers.Layer (via", "def get_distributions_with_w_dim(): distributions = [] for d in [1, 5]: mean = np.zeros(d)", "test ensures that TrackableLayer does not have a metaclass, and hence by adding", "See the License for the specific language governing permissions and # limitations under", "scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "range(batch_size)] for k, v in params.items() if isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params)", "import DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, 
LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "rtol=1e-10) class ArrayMatcher: def __init__(self, expected): self.expected = expected def __eq__(self, actual): return", "from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############ def _zero_one_normal_prior(w_dim):", "lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is 0 when posteriors == priors", "batch_size = 10 params = distribution.parameters posterior_params = { k: [v + 0.5", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc,", "prior=distribution) # test kl is 0 when posteriors == priors posterior = distribution", "np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match @pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42])", "0 when posteriors != priors batch_size = 10 params = distribution.parameters posterior_params =", "Contributors. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "the specific language governing permissions and # limitations under the License. # import", "# test kl is 0 when posteriors == priors posterior = distribution assert", "= posteriors.parameters[\"scale_diag\"] gpflow_local_kls = gauss_kl(q_mu, q_sqrt) tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors)) np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10) class", "# Utilities ############ def _zero_one_normal_prior(w_dim): \"\"\" N(0, I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim))", "LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu = posteriors.parameters[\"loc\"]", "= test_data num_data, x_dim = inputs.shape prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim)", "priors batch_size = 10 params = distribution.parameters posterior_params = { k: [v +", "np.full((num_data, y_dim), np.nan) observations = [inputs, targets] encoder_inputs = np.concatenate(observations, axis=-1) _ =", "Version 2.0 (the \"License\"); # you may not use this file except in", "distributions.append((mvn_diag, d)) return distributions ############ # Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution,", "except in compliance with the License. 
# You may obtain a copy of", "encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means) lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data,", "def __init__(self, expected): self.expected = expected def __eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True)", "loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, )", "we are not accidentally removing some required TensorFlow magic metaclass. \"\"\" assert LayerWithObservations.__bases__", "when posteriors != priors batch_size = 10 params = distribution.parameters posterior_params = {", "= tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv =", "> 0 when posteriors != priors batch_size = 10 params = distribution.parameters posterior_params", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "subclass of tf.keras.layers.Layer (via TrackableLayer); this test ensures that TrackableLayer does not have", "is 0 when posteriors == priors posterior = distribution assert lv._local_kls(posterior) == 0", "lv(inputs, observations=observations, training=True) # assert_called_once_with uses == for comparison which fails on arrays", "comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) #", "assert_called_once_with uses == for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss =", "== for comparison which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses,", "np.testing.assert_array_equal(sample_prior, prior_expected) tf.random.set_seed(seed) sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2) tf.random.set_seed(seed) posterior_expected =", "expected def __eq__(self, actual): return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker,", "def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim = 43, 3, 1 prior_shape = (w_dim,)", "\"\"\" N(0, I) prior \"\"\" return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim)) def get_distributions_with_w_dim(): distributions = []", "DirectlyParameterizedNormalDiag from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer tf.keras.backend.set_floatx(\"float64\") ############ # Utilities ############ def", "posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv", "2, ) encoder = 
mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(encoder=encoder, prior=prior) inputs = np.full((num_data,", "k, v in params.items() if isinstance(v, np.ndarray) } posterior = lv.distribution_class(**posterior_params) local_kls =", "posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1) np.testing.assert_array_equal(sample_posterior, posterior_expected) def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a", "class ArrayMatcher: def __init__(self, expected): self.expected = expected def __eq__(self, actual): return np.allclose(actual,", "np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan) observations = [inputs, targets] encoder_inputs", "which fails on arrays encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True) expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))] np.testing.assert_equal(lv.losses, expected_loss) # also", "w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) # test kl is", "Tests ############ @pytest.mark.parametrize(\"distribution, w_dim\", get_distributions_with_w_dim()) def test_local_kls(distribution, w_dim): lv = LatentVariableLayer(encoder=None, prior=distribution) #", "np.all(local_kls > 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_local_kl_gpflow_consistency(w_dim): num_data", "= lv._local_kls(posterior) assert np.all(local_kls > 0) assert local_kls.shape == (batch_size,) @pytest.mark.parametrize(\"w_dim\", [1, 5])", "tensorflow as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders", "return np.allclose(actual, self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim", "language governing permissions and # limitations under the License. 
# import abc import", "d in [1, 5]: mean = np.zeros(d) scale_tri_l = np.eye(d) mvn = tfp.distributions.MultivariateNormalTriL(mean,", "5]) def test_local_kl_gpflow_consistency(w_dim): num_data = 400 means = np.random.randn(num_data, w_dim) encoder = DirectlyParameterizedNormalDiag(num_data,", "{ k: [v + 0.5 for _ in range(batch_size)] for k, v in", "as tf import tensorflow_probability as tfp from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import", "scale_diag=np.random.randn(*posteriors_shape) ** 2, ) encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag)) lv = LatentVariableLayer(prior=prior, encoder=encoder) tf.random.set_seed(seed)", "= inputs.shape prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape),", "def test_no_tensorflow_metaclass_overwritten(): \"\"\" LayerWithObservations is a subclass of tf.keras.layers.Layer (via TrackableLayer); this test", "tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*prior_shape), scale_diag=np.random.randn(*prior_shape) ** 2, ) posteriors = tfp.distributions.MultivariateNormalDiag( loc=np.random.randn(*posteriors_shape), scale_diag=np.random.randn(*posteriors_shape) ** 2,", "distributed under the License is distributed on an \"AS IS\" BASIS, # WITHOUT", "inputs, targets = test_data num_data, x_dim = inputs.shape prior_shape = (w_dim,) posteriors_shape =", "+ 0.5 for _ in range(batch_size)] for k, v in params.items() if isinstance(v,", "import tensorflow_probability as tfp from gpflow.kullback_leiblers import gauss_kl from gpflux.encoders import DirectlyParameterizedNormalDiag from", "import numpy as np import pytest import tensorflow as tf import tensorflow_probability as", "= LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim)) posteriors = lv._inference_posteriors( [np.random.randn(num_data, 3), np.random.randn(num_data, 2)] ) q_mu =", "self.expected, equal_nan=True) @pytest.mark.parametrize(\"w_dim\", [1, 5]) def test_latent_variable_layer_losses(mocker, w_dim): num_data, x_dim, y_dim = 43,", "@pytest.mark.parametrize(\"w_dim\", [1, 5]) @pytest.mark.parametrize(\"seed2\", [None, 42]) def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2): seed =", "y_dim = 43, 3, 1 prior_shape = (w_dim,) posteriors_shape = (num_data, w_dim) prior", "= np.full((num_data, x_dim), np.nan) targets = np.full((num_data, y_dim), np.nan) observations = [inputs, targets]" ]
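The tests above drive LatentVariableLayer through mocked encoders and its private helpers (_local_kls, _inference_posteriors). As a complement, the following is a minimal usage sketch assembled only from the public calls that appear in those tests: constructing the layer from a prior and a DirectlyParameterizedNormalDiag encoder, then calling it with and without observations. The data sizes and variable names are illustrative assumptions and are not part of the test suite.

# Minimal sketch (assumed setup), mirroring the calls exercised in the tests above.
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp

from gpflux.encoders import DirectlyParameterizedNormalDiag
from gpflux.layers import LatentVariableLayer

tf.keras.backend.set_floatx("float64")

num_data, x_dim, y_dim, w_dim = 100, 3, 1, 2  # illustrative sizes
X = np.random.randn(num_data, x_dim)
Y = np.random.randn(num_data, y_dim)

prior = tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim))
means = np.random.randn(num_data, w_dim)
encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means)
lv = LatentVariableLayer(prior=prior, encoder=encoder)

# Without observations the layer samples w from the prior and concatenates it onto X,
# giving an output of shape (num_data, x_dim + w_dim).
prior_sample = lv(X)

# With observations and training=True it samples from the encoder's per-datapoint
# posterior and registers a KL(posterior || prior) loss on the layer (cf. lv.losses above).
posterior_sample = lv(X, observations=[X, Y], training=True)
print(prior_sample.shape, posterior_sample.shape, lv.losses)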
[ "retrieve trust relationships (auth: only creator and admins allowed) # POST /trust with", "= config.config() relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found')", "and admins allowed) # POST /trust with json body to initiate a trust", "params: creator = params['creator'] else: creator = None except ValueError: self.response.set_status(400, 'No json", "config.config() relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found') return", "and peerid) to retrieve trust relationships (auth: only creator and admins allowed) #", "'ignore')) peer_approved = None if 'approved' in params: if params['approved'] and params['approved'] ==", "= { 'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved':", "def delete(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False)", "to create trust relationship') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/' +", "else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204, 'Ok') else:", "(auth: creator, admin, or # peer secret) # Handling requests to trust/ class", "return if len(baseuri) == 0 or len(peerid) == 0 or len(type) == 0:", "return logging.debug('GET trust headers: ' + str(self.request.headers)) (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "if not deleted: self.response.set_status(502, 'Not able to delete relationship with peer.') return self.response.set_status(204,", "peerid) to retrieve trust relationships (auth: only creator and admins allowed) # POST", "subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False): self.response.set_status(403) return isPeer = False if check.trust and check.trust.peerid", "from actingweb import actor from actingweb import config from actingweb import trust from", "json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted') #", "not a peer (primarily for testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() ==", "> 0: baseuri = self.request.get('baseuri') else: baseuri = '' if self.request.get('desc') and len(self.request.get('desc'))", "to retrieve trust relationships (auth: only creator and admins allowed) # POST /trust", "else: baseuri = '' if 'id' in params: peerid = params['id'] else: peerid", "if 'baseuri' in params: baseuri = params['baseuri'] else: baseuri = '' if 'id'", "baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def delete(self, id, relationship,", "from google.appengine.ext.webapp import template import json import logging import datetime import time #", "= \"application/json\" if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted') # Handling requests to", "params['trustee_root'] else: trustee_root = '' if 'creator' in params: creator = params['creator'] else:", "'Not modified') def delete(self, id, relationship, peerid): (Config, 
myself, check) = auth.init_actingweb(appreq=self, id=id,", "myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself or check.response[\"code\"] !=", "from actingweb import config from actingweb import trust from actingweb import auth import", "\"application/json\" if my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id, relationship, peerid):", "'desc' in params: desc = params['desc'] except ValueError: url = self.request.get('url') relationship =", "peer) (auth: creator, admin, or # peer secret) # Handling requests to trust/", "if self.request.get('_method') == 'POST': self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust')", "'' if 'approved' in params: if params['approved'] == True or params['approved'].lower() == \"true\":", "GET /trust with query parameters (relationship, type, and peerid) to retrieve trust relationships", "?peer=true if the delete is from the peer) (auth: creator, admin, or #", "if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET',", "creator, admin, or # peer secret) # Handling requests to trust/ class rootHandler(webapp2.RequestHandler):", "another (reciprocal relationship) (auth: only creator and admins allowed) # POST /trust/{relationship} with", "self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config, myself, check) = auth.init_actingweb(appreq=self,", "verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship, desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\",", "return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore'))", "True else: approved = None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri =", "{ 'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved,", "return my_trust = relationships[0] # If the peer did a GET to verify", "peer URL') return secret = Config.newToken() new_trust = myself.createReciprocalTrust( url=url, secret=secret, desc=desc, relationship=relationship,", "of GET param peer=true is a way of forcing no deletion of a", "'' desc = '' relationship = Config.default_relationship type = '' try: params =", "creator = params['creator'] else: creator = None except ValueError: self.response.set_status(400, 'No json content')", "+ myself.id + '/trust/' + new_trust.relationship + \"/\" + new_trust.peerid)) pair = {", "'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id, relationship, peerid): (Config, myself, check) =", "import trust from actingweb import auth import webapp2 import os from google.appengine.ext.webapp import", "= self.request.get('baseuri') else: baseuri = '' if self.request.get('desc') and len(self.request.get('desc')) > 0: desc", "myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self, id, relationship): (Config,", "we haven't approved the relationship yet if not 
check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False):", "my_trust = relationships[0] # If the peer did a GET to verify if", "= True else: approved = None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri", "= None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else: baseuri", "relationship with peer.') return self.response.set_status(204, 'Ok') application = webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>',", "relationship=relationship, desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id", "'relationship': rel.relationship, 'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret':", "put(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if", "if 'approved' in params: if params['approved'] == True or params['approved'].lower() == \"true\": approved", "else: approved = None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri')", "/trust with json body to initiate a trust relationship between this # actor", "params: secret = params['secret'] else: secret = '' if 'desc' in params: desc", "relationships, e.g. /trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def get(self, id, relationship, peerid): if self.request.get('_method') ==", "desc = params['desc'] else: desc = '' if 'approved' in params: if params['approved']", "e.g. /trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship): if self.request.get('_method') == 'POST': self.post(id,", "params['verify'] else: verificationToken = None except ValueError: self.response.set_status(400, 'No json content') return if", "self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') # Handling requests to /trust/*, e.g. 
/trust/friend class", "new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201, 'Created')", "params['desc'] else: desc = '' if 'approved' in params: if params['approved'] == True", "method='PUT', peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params:", "import webapp2 import os from google.appengine.ext.webapp import template import json import logging import", "peerid): if self.request.get('_method') == 'PUT': self.put(id, relationship, peerid) return if self.request.get('_method') == 'DELETE':", "import time # /trust handlers # # GET /trust with query parameters (relationship,", "# ?peer=true if the delete is from the peer) (auth: creator, admin, or", "my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken = '' pair = { 'baseuri':", "self.post(id, relationship) return self.response.set_status(404, \"Not found\") def put(self, id, relationship): (Config, myself, check)", "= { 'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid': new_trust.peerid, 'relationship': new_trust.relationship, 'approved': new_trust.approved, 'peer_approved':", "/trust/{relationship} with json body to create new trust # relationship (see config.py for", "'url' in params: url = params['url'] else: url = '' if 'relationship' in", "if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else: baseuri = ''", "type = params['type'] if 'desc' in params: desc = params['desc'] except ValueError: url", "= params['desc'] else: desc = '' if 'approved' in params: if params['approved'] ==", "return if self.request.get('approved') and len(self.request.get('approved')) > 0: if self.request.get('approved').lower() == \"true\": approved =", "== 'POST': self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not", "creator, # admin, or peer secret) # DELETE /trust/{relationship}}/{actorid} to delete a relationship", "params['creator'] else: creator = None except ValueError: self.response.set_status(400, 'No json content') return if", "'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret':", "trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self, id, relationship): (Config, myself,", "in params: url = params['url'] else: url = '' if 'relationship' in params:", "import template import json import logging import datetime import time # /trust handlers", "peerid: isPeer = True else: # Use of GET param peer=true is a", "and check.trust.peerid == peerid and not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken", "myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship", "between this # actor and another (reciprocal relationship) (auth: only creator and admins", "else: type = '' if 'secret' in params: secret = params['secret'] else: secret", "If the peer did a GET to verify if check.trust and check.trust.peerid ==", "type = '' peerid = '' relationship = self.request.get('relationship') 
type = self.request.get('type') peerid", "rel.relationship, 'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret,", "self.response.set_status(408, 'Unable to create trust relationship') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id +", "content') def post(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship,", "trustHandler(webapp2.RequestHandler): def get(self, id, relationship, peerid): if self.request.get('_method') == 'PUT': self.put(id, relationship, peerid)", "same as /trust if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content')", "(Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself or check.response[\"code\"]", "'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship, 'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified':", "/trust/{relationship}}/{actorid} with a json body to change details on a relationship (baseuri, secret,", "+ \"/\" + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid': new_trust.peerid,", "verificationToken = my_trust.verificationToken else: verificationToken = '' pair = { 'baseuri': my_trust.baseuri, 'id':", "json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config, myself, check)", "peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer = True Config = config.config()", "(auth: only creator and admins allowed) # POST /trust/{relationship} with json body to", "peer secret) # DELETE /trust/{relationship}}/{actorid} to delete a relationship (with # ?peer=true if", "out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202,", "else: self.response.set_status(202, 'Accepted') def post(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self,", "the same as /trust if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No", "on a relationship (baseuri, secret, desc) (auth: creator, # admin, or peer secret)", "= self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer = True Config = config.config() relationships", "to trust/ class rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method') == 'POST': self.post(id) return", "check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship = '' type = '' peerid = ''", "try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params: baseuri = params['baseuri'] else:", "out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') # Handling requests to", "+ myself.id + '/trust/' + new_trust.relationship + '/' + new_trust.peerid)) pair = {", "True Config = config.config() relationships = myself.getTrustRelationships( 
relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404,", "None except ValueError: if not self.request.get('_method') or self.request.get('_method') != \"PUT\": self.response.set_status(400, 'No json", "Config = config.config() relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not", "self.response.set_status(201, 'Created') # Handling requests to /trust/*, e.g. /trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self,", "a json body to change details on a relationship (baseuri, secret, desc) (auth:", "relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found') return my_trust", "# Since we received a request for a relationship, assume that peer has", "self.response.set_status(204, 'No content') def delete(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "required) # GET /trust/{relationship}}/{actorid} to get details on a specific relationship (auth: creator,", "len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else: baseuri = '' if self.request.get('desc') and", "\"PUT\": self.response.set_status(400, 'No json content') return if self.request.get('approved') and len(self.request.get('approved')) > 0: if", "'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type':", "not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] # If the peer", "'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201,", "yet if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False): self.response.set_status(403) return isPeer = False", "= params['baseuri'] else: baseuri = '' if 'desc' in params: desc = params['desc']", "peer about changes in the relationship # PUT /trust/{relationship}}/{actorid} with a json body", "params: if params['approved'] == True or params['approved'].lower() == \"true\": approved = True else:", "self.response.set_status(400, 'Missing mandatory attributes') return if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved =", "params['id'] else: peerid = '' if 'type' in params: type = params['type'] else:", "actingweb import actor from actingweb import config from actingweb import trust from actingweb", "self.delete(id, relationship, peerid) return logging.debug('GET trust headers: ' + str(self.request.headers)) (Config, myself, check)", "'desc': my_trust.desc, 'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if", "auth required) # GET /trust/{relationship}}/{actorid} to get details on a specific relationship (auth:", "url = '' if 'relationship' in params: relationship = params['relationship'] if 'type' in", "'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified':", "params['approved'] and params['approved'] == True: peer_approved = 
True except ValueError: self.response.set_status(400, 'No json", "= '' if 'secret' in params: secret = params['secret'] else: secret = ''", "'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, })", "peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship,", "able to delete relationship with peer.') return self.response.set_status(204, 'Ok') application = webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>',", "default relationship and auto-accept, no # auth required) # GET /trust/{relationship}}/{actorid} to get", "params: desc = params['desc'] else: desc = '' if 'approved' in params: if", "= auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself or (check.response[\"code\"] != 200", "myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return secret", "secret) # Handling requests to trust/ class rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method')", "return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8',", "# DELETE /trust/{relationship}}/{actorid} to delete a relationship (with # ?peer=true if the delete", "!= 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships =", "if 'desc' in params: desc = params['desc'] else: desc = '' if 'approved'", "id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if", "pairs = [] for rel in relationships: pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid':", "def post(self, id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not myself", "'Missing mandatory attributes') return if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved = True", "return isPeer = False if check.trust and check.trust.peerid == peerid: isPeer = True", "== 0: self.response.set_status(400, 'Missing peer URL') return secret = Config.newToken() new_trust = myself.createReciprocalTrust(", "if not new_trust: self.response.set_status(408, 'Unable to create trust relationship') return self.response.headers.add_header( \"Location\", str(Config.root", "if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def", "except ValueError: self.response.set_status(400, 'No json content') return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok')", "if len(url) == 0: self.response.set_status(400, 'Missing peer URL') return secret = Config.newToken() new_trust", "if not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] # If the", "'relationship' in params: relationship = params['relationship'] if 'type' in params: type = params['type']", "'PUT': 
self.put(id, relationship, peerid) return if self.request.get('_method') == 'DELETE': self.delete(id, relationship, peerid) return", "Handling requests to specific relationships, e.g. /trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def get(self, id, relationship,", "GET /trust/{relationship}}/{actorid} to get details on a specific relationship (auth: creator, admin, or", "new_trust = myself.createReciprocalTrust( url=url, secret=secret, desc=desc, relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable", "my_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if my_trust.approved: self.response.set_status(200, 'Ok')", "len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self,", "== 'POST': self.post(id, relationship) return self.response.set_status(404, \"Not found\") def put(self, id, relationship): (Config,", "# Use of GET param peer=true is a way of forcing no deletion", "peer_approved=True, relationship=relationship, desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root +", "if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore'))", "template import json import logging import datetime import time # /trust handlers #", "or self.request.get('_method') != \"PUT\": self.response.set_status(400, 'No json content') return if self.request.get('approved') and len(self.request.get('approved'))", "new_trust.desc, 'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved:", "peerid=peerid, approved=False): self.response.set_status(403) return isPeer = False if check.trust and check.trust.peerid == peerid:", "check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself or check.response[\"code\"] != 200:", "requestor is not a peer (primarily for testing purposes) peerGet = self.request.get('peer').lower() if", "same as /trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8',", "subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships:", "new_trust.desc, 'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created')", "myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403)", "import config from actingweb import trust from actingweb import auth import webapp2 import", "attributes') return if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved = True else: approved", "if self.request.get('_method') == 'PUT': self.put(id, relationship, peerid) return if self.request.get('_method') == 'DELETE': self.delete(id,", "= auth.init_actingweb(appreq=self, id=id, 
path='trust', subpath=relationship, add_response=False) if not myself: return if not check.checkAuthorisation(path='trust',", "testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer = True Config", "peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def put(self, id, relationship, peerid):", "peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params: baseuri", "myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not myself or check.response[\"code\"] != 200:", "else: peerid = '' if 'type' in params: type = params['type'] else: type", "myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken,", "0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self, id, relationship):", "peer_approved = True except ValueError: self.response.set_status(400, 'No json content') return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid,", "is the same as /trust if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204,", "assume that peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken, type=type,", "a way of forcing no deletion of a peer # relationship even when", "peerid = self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type) if not relationships: self.response.set_status(404,", "id=id, path='trust', subpath=relationship, add_response=False) if not myself or (check.response[\"code\"] != 200 and check.response[\"code\"]", "params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params: baseuri = params['baseuri'] else: baseuri", "class rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method') == 'POST': self.post(id) return (Config, myself,", "not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship = '' type = '' peerid =", "path='trust', subpath=relationship, add_response=False) if not myself: return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403)", "auth import webapp2 import os from google.appengine.ext.webapp import template import json import logging", "relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid,", "myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0]", "'' if 'verify' in params: verificationToken = params['verify'] else: verificationToken = None except", "or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return", "if not relationships: self.response.set_status(404, 'Not found') return pairs = [] 
for rel in", "peerid and not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken = '' pair", "(primarily for testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer =", "if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT',", "= \"application/json\" if my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id, relationship,", "'Not found') return my_trust = relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else:", "class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship): if self.request.get('_method') == 'POST': self.post(id, relationship) return", "information to a peer about changes in the relationship # PUT /trust/{relationship}}/{actorid} with", "relationship, assume that peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken,", "check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not", "return # We allow non-approved peers to delete even if we haven't approved", "return relationship = '' type = '' peerid = '' relationship = self.request.get('relationship')", "check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403) return try:", "Use of GET param peer=true is a way of forcing no deletion of", "handlers # # GET /trust with query parameters (relationship, type, and peerid) to", "if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST',", "'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') #", "baseuri = '' if self.request.get('desc') and len(self.request.get('desc')) > 0: desc = self.request.get('desc') else:", "out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config,", "len(url) == 0: self.response.set_status(400, 'Missing peer URL') return secret = Config.newToken() new_trust =", "'ignore')) if 'trustee_root' in params: trustee_root = params['trustee_root'] else: trustee_root = '' if", "rel.desc, 'secret': rel.secret, }) out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok')", "self.response.set_status(404, \"Not found\") return # Access is the same as /trust if not", "if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted') # Handling requests to specific relationships,", "!= 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403) return try: params", "'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, }) out =", "approved=approved, secret=secret, 
verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship, desc=desc) if not new_trust: self.response.set_status(403, 'Forbidden') return", "# Handling requests to specific relationships, e.g. /trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def get(self, id,", "peer secret) # POST /trust/{relationship}}/{actorid} to send information to a peer about changes", "subpath=relationship, add_response=False) if not myself: return if relationship != 'trustee': self.response.set_status(404, \"Not found\")", "if not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship = '' type = '' peerid", "myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted: self.response.set_status(502, 'Not able", "to initiate a trust relationship between this # actor and another (reciprocal relationship)", "path='trust', subpath=relationship) if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust',", "secret = params['secret'] else: secret = '' if 'desc' in params: desc =", "subpath=relationship) if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>',", "if self.request.get('_method') == 'POST': self.post(id, relationship) return self.response.set_status(404, \"Not found\") def put(self, id,", "'POST': self.post(id, relationship) return self.response.set_status(404, \"Not found\") def put(self, id, relationship): (Config, myself,", "self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found')", "'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken':", "self.response.set_status(404, 'Not found') return my_trust = relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False)", "not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken = '' pair = {", "not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] if isPeer: deleted =", "self.request.get('relationship') type = self.request.get('type') peerid = self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type)", "isPeer = False if check.trust and check.trust.peerid == peerid: isPeer = True else:", "import auth import webapp2 import os from google.appengine.ext.webapp import template import json import", "= None except ValueError: self.response.set_status(400, 'No json content') return if len(trustee_root) > 0:", "len(baseuri) == 0 or len(peerid) == 0 or len(type) == 0: self.response.set_status(400, 'Missing", "url=url, secret=secret, desc=desc, relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable to create trust", "as /trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore'))", "params['approved'] == True or params['approved'].lower() == \"true\": approved = True else: approved =", "'' if 'relationship' in params: 
relationship = params['relationship'] if 'type' in params: type", "modified') def put(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',", "type=type) if not relationships: self.response.set_status(404, 'Not found') return pairs = [] for rel", "\"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust')", "myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted: self.response.set_status(502, 'Not able to delete relationship with peer.')", "if 'verify' in params: verificationToken = params['verify'] else: verificationToken = None except ValueError:", "check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return secret = '' desc = '' relationship = Config.default_relationship", "myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def put(self, id, relationship,", "and len(self.request.get('desc')) > 0: desc = self.request.get('desc') else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship,", "'verified': new_trust.verified, 'type': new_trust.type, 'desc': new_trust.desc, 'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out)", "a peer about changes in the relationship # PUT /trust/{relationship}}/{actorid} with a json", "check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return secret = ''", "+ '/trust/' + new_trust.relationship + '/' + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri,", "to specific relationships, e.g. 
<reponame>actingweb/box-actingweb<filename>aw-actor-trust.py
#!/usr/bin/env python
#
from actingweb import actor
from actingweb import config
from actingweb import trust
from actingweb import auth

import webapp2

import os
from google.appengine.ext.webapp import template

import json
import logging
import datetime
import time

# /trust handlers
#
# GET /trust with query parameters (relationship, type, and peerid) to retrieve
#   trust relationships (auth: only creator and admins allowed)
# POST /trust with json body to initiate a trust relationship between this
#   actor and another (reciprocal relationship) (auth: only creator and admins allowed)
# POST /trust/{relationship} with json body to create new trust
#   relationship (see config.py for default relationship and auto-accept, no
#   auth required)
# GET /trust/{relationship}/{actorid} to get details on a specific relationship
#   (auth: creator, admin, or peer secret)
# POST /trust/{relationship}/{actorid} to send information to a peer about changes in the relationship
# PUT /trust/{relationship}/{actorid} with a json body to change details on a relationship (baseuri, secret, desc) (auth: creator,
#   admin, or peer secret)
# DELETE /trust/{relationship}/{actorid} to delete a relationship (with
#   ?peer=true if the delete is from the peer) (auth: creator, admin, or
#   peer secret)


# Handling requests to trust/
class rootHandler(webapp2.RequestHandler):

    def get(self, id):
        if self.request.get('_method') == 'POST':
            self.post(id)
            return
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust')
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', method='GET'):
            self.response.set_status(403)
            return
        relationship = ''
        type = ''
        peerid = ''
        relationship = self.request.get('relationship')
        type = self.request.get('type')
        peerid = self.request.get('peerid')

        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid, type=type)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        pairs = []
        for rel in relationships:
            pairs.append({
                'baseuri': rel.baseuri,
                'id': myself.id,
                'peerid': rel.peerid,
                'relationship': rel.relationship,
                'approved': rel.approved,
                'peer_approved': rel.peer_approved,
                'verified': rel.verified,
                'type': rel.type,
                'desc': rel.desc,
                'secret': rel.secret,
            })
        out = json.dumps(pairs)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        self.response.set_status(200, 'Ok')

    def post(self, id):
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust')
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', method='POST'):
            self.response.set_status(403)
            return
        secret = ''
        desc = ''
        relationship = Config.default_relationship
        type = ''
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'url' in params:
                url = params['url']
            else:
                url = ''
            if 'relationship' in params:
                relationship = params['relationship']
            if 'type' in params:
                type = params['type']
            if 'desc' in params:
                desc = params['desc']
        except ValueError:
            url = self.request.get('url')
            relationship = self.request.get('relationship')
            type = self.request.get('type')
        if len(url) == 0:
            self.response.set_status(400, 'Missing peer URL')
            return
        secret = Config.newToken()
        new_trust = myself.createReciprocalTrust(
            url=url, secret=secret, desc=desc, relationship=relationship, type=type)
        if not new_trust:
            self.response.set_status(408, 'Unable to create trust relationship')
            return
        self.response.headers.add_header(
            "Location", str(Config.root + myself.id + '/trust/' +
                            new_trust.relationship + '/' + new_trust.peerid))
        pair = {
            'baseuri': new_trust.baseuri,
            'id': myself.id,
            'peerid': new_trust.peerid,
            'relationship': new_trust.relationship,
            'approved': new_trust.approved,
            'peer_approved': new_trust.peer_approved,
            'verified': new_trust.verified,
            'type': new_trust.type,
            'desc': new_trust.desc,
            'secret': new_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        self.response.set_status(201, 'Created')


# Handling requests to /trust/*, e.g. /trust/friend
class relationshipHandler(webapp2.RequestHandler):

    def get(self, id, relationship):
        if self.request.get('_method') == 'POST':
            self.post(id, relationship)
            return
        self.response.set_status(404, "Not found")

    def put(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',
                                                      subpath=relationship, add_response=False)
        if not myself:
            return
        if relationship != 'trustee':
            self.response.set_status(404, "Not found")
            return
        # Access is the same as /trust
        if not check.checkAuthorisation(path='trust', method='POST'):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'trustee_root' in params:
                trustee_root = params['trustee_root']
            else:
                trustee_root = ''
            if 'creator' in params:
                creator = params['creator']
            else:
                creator = None
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if len(trustee_root) > 0:
            myself.setProperty('trustee_root', trustee_root)
        if creator:
            myself.modify(creator=creator)
        self.response.set_status(204, 'No content')

    def delete(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',
                                                      subpath=relationship, add_response=False)
        if not myself:
            return
        if relationship != 'trustee':
            self.response.set_status(404, "Not found")
            return
        # Access is the same as /trust
        if not check.checkAuthorisation(path='trust', method='DELETE'):
            self.response.set_status(403)
            return
        myself.deleteProperty('trustee_root')
        self.response.set_status(204, 'No content')

    def post(self, id, relationship):
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',
                                                      subpath=relationship, add_response=False)
        if not myself:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'baseuri' in params:
                baseuri = params['baseuri']
            else:
                baseuri = ''
            if 'id' in params:
                peerid = params['id']
            else:
                peerid = ''
            if 'type' in params:
                type = params['type']
            else:
                type = ''
            if 'secret' in params:
                secret = params['secret']
            else:
                secret = ''
            if 'desc' in params:
                desc = params['desc']
            else:
                desc = ''
            if 'verify' in params:
                verificationToken = params['verify']
            else:
                verificationToken = None
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if len(baseuri) == 0 or len(peerid) == 0 or len(type) == 0:
            self.response.set_status(400, 'Missing mandatory attributes')
            return
        if Config.auto_accept_default_relationship and Config.default_relationship == relationship:
            approved = True
        else:
            approved = False
        # Since we received a request for a relationship, assume that peer has approved
        new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved,
                                               secret=secret, verificationToken=verificationToken,
                                               type=type, peer_approved=True,
                                               relationship=relationship, desc=desc)
        if not new_trust:
            self.response.set_status(403, 'Forbidden')
            return
        self.response.headers.add_header(
            "Location", str(Config.root + myself.id + '/trust/' +
                            new_trust.relationship + "/" + new_trust.peerid))
        pair = {
            'baseuri': new_trust.baseuri,
            'id': myself.id,
            'peerid': new_trust.peerid,
            'relationship': new_trust.relationship,
            'approved': new_trust.approved,
            'peer_approved': new_trust.peer_approved,
            'verified': new_trust.verified,
            'type': new_trust.type,
            'desc': new_trust.desc,
            'secret': new_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        if approved:
            self.response.set_status(201, 'Created')
        else:
            self.response.set_status(202, 'Accepted')


# Handling requests to specific relationships, e.g. /trust/friend/12f2ae53bd
class trustHandler(webapp2.RequestHandler):

    def get(self, id, relationship, peerid):
        if self.request.get('_method') == 'PUT':
            self.put(id, relationship, peerid)
            return
        if self.request.get('_method') == 'DELETE':
            self.delete(id, relationship, peerid)
            return
        logging.debug('GET trust headers: ' + str(self.request.headers))
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',
                                                      subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid):
            self.response.set_status(403)
            return
        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        my_trust = relationships[0]
        # If the peer did a GET to verify
        if check.trust and check.trust.peerid == peerid and not my_trust.verified:
            my_trust.modify(verified=True)
            verificationToken = my_trust.verificationToken
        else:
            verificationToken = ''
        pair = {
            'baseuri': my_trust.baseuri,
            'id': myself.id,
            'peerid': my_trust.peerid,
            'relationship': my_trust.relationship,
            'approved': my_trust.approved,
            'peer_approved': my_trust.peer_approved,
            'verified': my_trust.verified,
            'verificationToken': verificationToken,
            'type': my_trust.type,
            'desc': my_trust.desc,
            'secret': my_trust.secret,
        }
        out = json.dumps(pair)
        self.response.write(out)
        self.response.headers["Content-Type"] = "application/json"
        if my_trust.approved:
            self.response.set_status(200, 'Ok')
        else:
            self.response.set_status(202, 'Accepted')

    def post(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',
                                                      subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid):
            self.response.set_status(403)
            return
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            peer_approved = None
            if 'approved' in params:
                if params['approved'] and params['approved'] == True:
                    peer_approved = True
        except ValueError:
            self.response.set_status(400, 'No json content')
            return
        if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved):
            self.response.set_status(204, 'Ok')
        else:
            self.response.set_status(405, 'Not modified')

    def put(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',
                                                      subpath=relationship)
        if not myself or check.response["code"] != 200:
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='PUT', peerid=peerid):
            self.response.set_status(403)
            return
        # Default to "no change" so approved is always bound even if the json
        # body does not contain an 'approved' attribute
        approved = None
        try:
            params = json.loads(self.request.body.decode('utf-8', 'ignore'))
            if 'baseuri' in params:
                baseuri = params['baseuri']
            else:
                baseuri = ''
            if 'desc' in params:
                desc = params['desc']
            else:
                desc = ''
            if 'approved' in params:
                if params['approved'] == True or params['approved'].lower() == "true":
                    approved = True
                else:
                    approved = None
        except ValueError:
            if not self.request.get('_method') or self.request.get('_method') != "PUT":
                self.response.set_status(400, 'No json content')
                return
            if self.request.get('approved') and len(self.request.get('approved')) > 0:
                if self.request.get('approved').lower() == "true":
                    approved = True
                else:
                    approved = None
            if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0:
                baseuri = self.request.get('baseuri')
            else:
                baseuri = ''
            if self.request.get('desc') and len(self.request.get('desc')) > 0:
                desc = self.request.get('desc')
            else:
                desc = ''
        if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri,
                                       approved=approved, desc=desc):
            self.response.set_status(204, 'Ok')
        else:
            self.response.set_status(405, 'Not modified')

    def delete(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',
                                                      subpath=relationship, add_response=False)
        if not myself or (check.response["code"] != 200 and check.response["code"] != 401):
            auth.add_auth_response(appreq=self, auth_obj=check)
            return
        # We allow non-approved peers to delete even if we haven't approved the
        # relationship yet
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE',
                                        peerid=peerid, approved=False):
            self.response.set_status(403)
            return
        isPeer = False
        if check.trust and check.trust.peerid == peerid:
            isPeer = True
        else:
            # Use of GET param peer=true is a way of forcing no deletion of a peer
            # relationship even when requestor is not a peer (primarily for testing purposes)
            peerGet = self.request.get('peer').lower()
            if peerGet.lower() == "true":
                isPeer = True
        Config = config.config()
        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        my_trust = relationships[0]
        if isPeer:
            deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False)
        else:
            deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True)
        if not deleted:
            self.response.set_status(502, 'Not able to delete relationship with peer.')
            return
        self.response.set_status(204, 'Ok')


application = webapp2.WSGIApplication([
    webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'),
    webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler, name='relationshipHandler'),
    webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler, name='trustHandler'),
], debug=True)
else: self.response.set_status(405, 'Not modified') def delete(self, id,", "json body to initiate a trust relationship between this # actor and another", "new_trust.relationship + '/' + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid':", "relationship = self.request.get('relationship') type = self.request.get('type') peerid = self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship,", "(Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not myself or check.response[\"code\"] !=", "from actingweb import auth import webapp2 import os from google.appengine.ext.webapp import template import", "== \"true\": approved = True else: approved = None except ValueError: if not", "def put(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship)", "else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted: self.response.set_status(502, 'Not able to delete", "import actor from actingweb import config from actingweb import trust from actingweb import", "method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404,", "relationship != 'trustee': self.response.set_status(404, \"Not found\") return # Access is the same as", "method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'trustee_root' in params: trustee_root", "or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403) return", "'verify' in params: verificationToken = params['verify'] else: verificationToken = None except ValueError: self.response.set_status(400,", "True else: # Use of GET param peer=true is a way of forcing", "self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved = None if 'approved' in", "return self.response.set_status(204, 'Ok') application = webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler, name='relationshipHandler'), webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>',", "and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return # We allow non-approved peers to", "my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type,", "(Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself: return", "when requestor is not a peer (primarily for testing purposes) peerGet = self.request.get('peer').lower()", "url = self.request.get('url') relationship = self.request.get('relationship') type = self.request.get('type') if len(url) == 0:", "401): auth.add_auth_response(appreq=self, auth_obj=check) return # We allow non-approved peers to delete even if", "params['relationship'] if 'type' in params: type = params['type'] if 'desc' in params: desc", "= True else: approved = False # Since we received a request for", 
"json.loads(self.request.body.decode('utf-8', 'ignore')) if 'url' in params: url = params['url'] else: url = ''", "trust from actingweb import auth import webapp2 import os from google.appengine.ext.webapp import template", "google.appengine.ext.webapp import template import json import logging import datetime import time # /trust", "if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not", "try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved = None if 'approved' in params: if", "else: creator = None except ValueError: self.response.set_status(400, 'No json content') return if len(trustee_root)", "if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403)", "desc = self.request.get('desc') else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc):", "= self.request.get('type') peerid = self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type) if not", "else: approved = None except ValueError: if not self.request.get('_method') or self.request.get('_method') != \"PUT\":", "my_trust.modify(verified=True) verificationToken = my_trust.verificationToken else: verificationToken = '' pair = { 'baseuri': my_trust.baseuri,", "if len(baseuri) == 0 or len(peerid) == 0 or len(type) == 0: self.response.set_status(400,", "params['secret'] else: secret = '' if 'desc' in params: desc = params['desc'] else:", "else: self.response.set_status(202, 'Accepted') # Handling requests to specific relationships, e.g. 
/trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler):", "approved = None if self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else:", "of a peer # relationship even when requestor is not a peer (primarily", "'Not modified') def put(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "is from the peer) (auth: creator, admin, or # peer secret) # Handling", "relationships (auth: only creator and admins allowed) # POST /trust with json body", "peer (primarily for testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer", "json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved = None if 'approved' in params: if params['approved'] and params['approved']", "not self.request.get('_method') or self.request.get('_method') != \"PUT\": self.response.set_status(400, 'No json content') return if self.request.get('approved')", "(reciprocal relationship) (auth: only creator and admins allowed) # POST /trust/{relationship} with json", "params: type = params['type'] else: type = '' if 'secret' in params: secret", "return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship,", "admin, or peer secret) # DELETE /trust/{relationship}}/{actorid} to delete a relationship (with #", "for a relationship, assume that peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved,", "= True else: # Use of GET param peer=true is a way of", "check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'trustee_root' in params:", "the same as /trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params =", "check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403) return try:", "Handling requests to trust/ class rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method') == 'POST':", "on a specific relationship (auth: creator, admin, or peer secret) # POST /trust/{relationship}}/{actorid}", "method='POST'): self.response.set_status(403) return secret = '' desc = '' relationship = Config.default_relationship type", "= params['verify'] else: verificationToken = None except ValueError: self.response.set_status(400, 'No json content') return", "/trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if", "self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def put(self, id, relationship, peerid): (Config, myself,", "relationship even when requestor is not a peer (primarily for testing purposes) peerGet", "# POST /trust/{relationship} with json body to create new trust # relationship (see", "return my_trust = relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted =", "None if 'approved' in params: if params['approved'] and params['approved'] == True: peer_approved =", "self.request.get('relationship') type = self.request.get('type') if len(url) == 
0: self.response.set_status(400, 'Missing peer URL') return", "check.trust.peerid == peerid: isPeer = True else: # Use of GET param peer=true", "secret) # DELETE /trust/{relationship}}/{actorid} to delete a relationship (with # ?peer=true if the", "params['baseuri'] else: baseuri = '' if 'desc' in params: desc = params['desc'] else:", "return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self, id, relationship): (Config, myself, check) =", "(relationship, type, and peerid) to retrieve trust relationships (auth: only creator and admins", "self.request.get('_method') == 'PUT': self.put(id, relationship, peerid) return if self.request.get('_method') == 'DELETE': self.delete(id, relationship,", "'type' in params: type = params['type'] if 'desc' in params: desc = params['desc']", "relationship=relationship, peerid=peerid) if not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] #", "'' relationship = self.request.get('relationship') type = self.request.get('type') peerid = self.request.get('peerid') relationships = myself.getTrustRelationships(", "params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'url' in params: url = params['url'] else: url", "peerid=peerid) if not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] if isPeer:", "id=id, path='trust', subpath=relationship) if not myself or check.response[\"code\"] != 200: return if not", "self.response.set_status(400, 'Missing peer URL') return secret = Config.newToken() new_trust = myself.createReciprocalTrust( url=url, secret=secret,", "isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted:", "or params['approved'].lower() == \"true\": approved = True else: approved = None except ValueError:", "if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self, id, relationship): (Config, myself, check)", "relationship (baseuri, secret, desc) (auth: creator, # admin, or peer secret) # DELETE", "else: verificationToken = None except ValueError: self.response.set_status(400, 'No json content') return if len(baseuri)", "Access is the same as /trust if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root')", "+ new_trust.relationship + '/' + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id': myself.id,", "if 'desc' in params: desc = params['desc'] except ValueError: url = self.request.get('url') relationship", "self.response.set_status(405, 'Not modified') def put(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self,", "= webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler, name='relationshipHandler'), webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler, name='trustHandler'), ], debug=True)", "= '' relationship = Config.default_relationship type = '' try: params = json.loads(self.request.body.decode('utf-8', 'ignore'))", "desc = '' relationship = Config.default_relationship type = '' try: params = json.loads(self.request.body.decode('utf-8',", "the relationship yet if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False): 
self.response.set_status(403) return isPeer", "to delete even if we haven't approved the relationship yet if not check.checkAuthorisation(path='trust',", "to delete a relationship (with # ?peer=true if the delete is from the", "params: trustee_root = params['trustee_root'] else: trustee_root = '' if 'creator' in params: creator", "str(Config.root + myself.id + '/trust/' + new_trust.relationship + \"/\" + new_trust.peerid)) pair =", "if not myself: return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'): self.response.set_status(403) return try: params", "trust relationship between this # actor and another (reciprocal relationship) (auth: only creator", "if self.request.get('approved') and len(self.request.get('approved')) > 0: if self.request.get('approved').lower() == \"true\": approved = True", "if 'desc' in params: desc = params['desc'] else: desc = '' if 'verify'", "json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') # Handling requests to /trust/*, e.g.", "= '' if 'creator' in params: creator = params['creator'] else: creator = None", "relationship between this # actor and another (reciprocal relationship) (auth: only creator and", "path='trust') if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='POST'):", "if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403)", "self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/' + new_trust.relationship + '/' + new_trust.peerid))", "self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') # Handling requests to /trust/*, e.g. 
/trust/friend", "body to change details on a relationship (baseuri, secret, desc) (auth: creator, #", "if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not modified') def put(self, id,", "if 'relationship' in params: relationship = params['relationship'] if 'type' in params: type =", "== 0 or len(type) == 0: self.response.set_status(400, 'Missing mandatory attributes') return if Config.auto_accept_default_relationship", "self.put(id, relationship, peerid) return if self.request.get('_method') == 'DELETE': self.delete(id, relationship, peerid) return logging.debug('GET", "haven't approved the relationship yet if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='DELETE', peerid=peerid, approved=False): self.response.set_status(403)", "if peerGet.lower() == \"true\": isPeer = True Config = config.config() relationships = myself.getTrustRelationships(", "if check.trust and check.trust.peerid == peerid: isPeer = True else: # Use of", "self.request.get('peerid') relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type) if not relationships: self.response.set_status(404, 'Not found')", "200 and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return # We allow non-approved peers", "auth_obj=check) return # We allow non-approved peers to delete even if we haven't", "id): if self.request.get('_method') == 'POST': self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "if 'creator' in params: creator = params['creator'] else: creator = None except ValueError:", "DELETE /trust/{relationship}}/{actorid} to delete a relationship (with # ?peer=true if the delete is", "True: peer_approved = True except ValueError: self.response.set_status(400, 'No json content') return if myself.modifyTrustAndNotify(relationship=relationship,", "and admins allowed) # POST /trust/{relationship} with json body to create new trust", "params['approved'].lower() == \"true\": approved = True else: approved = None except ValueError: if", "logging import datetime import time # /trust handlers # # GET /trust with", "ValueError: self.response.set_status(400, 'No json content') return if len(baseuri) == 0 or len(peerid) ==", "verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"]", "type=type) if not new_trust: self.response.set_status(408, 'Unable to create trust relationship') return self.response.headers.add_header( \"Location\",", "'' if 'creator' in params: creator = params['creator'] else: creator = None except", "if 'approved' in params: if params['approved'] and params['approved'] == True: peer_approved = True", "200: return if not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship = '' type =", "has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship, desc=desc)", "\"/\" + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id': myself.id, 'peerid': new_trust.peerid, 'relationship':", "= '' pair = { 'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship,", "relationship, 
peerid) return if self.request.get('_method') == 'DELETE': self.delete(id, relationship, peerid) return logging.debug('GET trust", "'/trust/' + new_trust.relationship + '/' + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri, 'id':", "creator: myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self, id, relationship): (Config, myself, check) =", "and another (reciprocal relationship) (auth: only creator and admins allowed) # POST /trust/{relationship}", "rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship, 'approved': rel.approved, 'peer_approved': rel.peer_approved, 'verified': rel.verified,", "relationship = self.request.get('relationship') type = self.request.get('type') if len(url) == 0: self.response.set_status(400, 'Missing peer", "= json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved = None if 'approved' in params: if params['approved'] and", "of forcing no deletion of a peer # relationship even when requestor is", "0 or len(peerid) == 0 or len(type) == 0: self.response.set_status(400, 'Missing mandatory attributes')", "webapp2 import os from google.appengine.ext.webapp import template import json import logging import datetime", "!= 200: return if not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship = '' type", "'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret, }", "admin, or # peer secret) # Handling requests to trust/ class rootHandler(webapp2.RequestHandler): def", "in params: relationship = params['relationship'] if 'type' in params: type = params['type'] if", "'ignore')) if 'baseuri' in params: baseuri = params['baseuri'] else: baseuri = '' if", "a request for a relationship, assume that peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri,", "check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not myself or check.response[\"code\"] != 200: return", "new trust # relationship (see config.py for default relationship and auto-accept, no #", "'type' in params: type = params['type'] else: type = '' if 'secret' in", "params['type'] else: type = '' if 'secret' in params: secret = params['secret'] else:", "my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret,", "or (check.response[\"code\"] != 200 and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return # We", "return if len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No content')", "if not relationships: self.response.set_status(404, 'Not found') return my_trust = relationships[0] if isPeer: deleted", "in params: secret = params['secret'] else: secret = '' if 'desc' in params:", "new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret, verificationToken=verificationToken, type=type, peer_approved=True, relationship=relationship, desc=desc) if not", "\"Not found\") def put(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',", "= '' type = '' peerid = '' relationship = self.request.get('relationship') type =", "relationship, peerid): (Config, 
myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself", "in the relationship # PUT /trust/{relationship}}/{actorid} with a json body to change details", "0: desc = self.request.get('desc') else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved,", "url = params['url'] else: url = '' if 'relationship' in params: relationship =", "if params['approved'] == True or params['approved'].lower() == \"true\": approved = True else: approved", "creator = None except ValueError: self.response.set_status(400, 'No json content') return if len(trustee_root) >", "relationships[0] # If the peer did a GET to verify if check.trust and", "self.response.set_status(403) return isPeer = False if check.trust and check.trust.peerid == peerid: isPeer =", "in params: peerid = params['id'] else: peerid = '' if 'type' in params:", "allowed) # POST /trust with json body to initiate a trust relationship between", "}) out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok') def post(self, id):", "= auth.init_actingweb(appreq=self, id=id, path='trust') if not myself or check.response[\"code\"] != 200: return if", "= my_trust.verificationToken else: verificationToken = '' pair = { 'baseuri': my_trust.baseuri, 'id': myself.id,", "'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out)", "and params['approved'] == True: peer_approved = True except ValueError: self.response.set_status(400, 'No json content')", "is the same as /trust if not check.checkAuthorisation(path='trust', method='POST'): self.response.set_status(403) return try: params", "self.response.set_status(405, 'Not modified') def delete(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self,", "auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself: return if not check.checkAuthorisation(path='trust', subpath='<type>',", "with json body to initiate a trust relationship between this # actor and", "ValueError: self.response.set_status(400, 'No json content') return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved): self.response.set_status(204, 'Ok') else:", "self.response.set_status(204, 'Ok') application = webapp2.WSGIApplication([ webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'), webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler, name='relationshipHandler'), webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler,", "'verified': my_trust.verified, 'verificationToken': verificationToken, 'type': my_trust.type, 'desc': my_trust.desc, 'secret': my_trust.secret, } out =", "+ str(self.request.headers)) (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself", "self.response.set_status(502, 'Not able to delete relationship with peer.') return self.response.set_status(204, 'Ok') application =", "self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params: baseuri =", "except ValueError: self.response.set_status(400, 'No json content') return if len(baseuri) == 0 or len(peerid)", "return self.response.headers.add_header( \"Location\", 
str(Config.root + myself.id + '/trust/' + new_trust.relationship + '/' +", "is not a peer (primarily for testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower()", "False if check.trust and check.trust.peerid == peerid: isPeer = True else: # Use", "rel.approved, 'peer_approved': rel.peer_approved, 'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, }) out", "if not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/'", "#!/usr/bin/env python # from actingweb import actor from actingweb import config from actingweb", "'Created') else: self.response.set_status(202, 'Accepted') # Handling requests to specific relationships, e.g. /trust/friend/12f2ae53bd class", "'No content') def post(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',", "if check.trust and check.trust.peerid == peerid and not my_trust.verified: my_trust.modify(verified=True) verificationToken = my_trust.verificationToken", "if the delete is from the peer) (auth: creator, admin, or # peer", "details on a specific relationship (auth: creator, admin, or peer secret) # POST", "/trust/{relationship}}/{actorid} to delete a relationship (with # ?peer=true if the delete is from", "actor from actingweb import config from actingweb import trust from actingweb import auth", "not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='GET', peerid=peerid): self.response.set_status(403) return relationships = myself.getTrustRelationships( relationship=relationship, peerid=peerid) if", "return secret = '' desc = '' relationship = Config.default_relationship type = ''", "new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/' + new_trust.relationship", "= json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202, 'Accepted')", "# Handling requests to /trust/*, e.g. /trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship):", "'Accepted') # Handling requests to specific relationships, e.g. 
/trust/friend/12f2ae53bd class trustHandler(webapp2.RequestHandler): def get(self,", "query parameters (relationship, type, and peerid) to retrieve trust relationships (auth: only creator", "trustee_root = params['trustee_root'] else: trustee_root = '' if 'creator' in params: creator =", "# # GET /trust with query parameters (relationship, type, and peerid) to retrieve", "= '' if 'type' in params: type = params['type'] else: type = ''", "trust/ class rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method') == 'POST': self.post(id) return (Config,", "a relationship, assume that peer has approved new_trust = myself.createVerifiedTrust(baseuri=baseuri, peerid=peerid, approved=approved, secret=secret,", "param peer=true is a way of forcing no deletion of a peer #", "a specific relationship (auth: creator, admin, or peer secret) # POST /trust/{relationship}}/{actorid} to", "else: baseuri = '' if 'desc' in params: desc = params['desc'] else: desc", "json content') return if len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204,", "new_trust.type, 'desc': new_trust.desc, 'secret': new_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\"", "add_response=False) if not myself: return if relationship != 'trustee': self.response.set_status(404, \"Not found\") return", "method='GET'): self.response.set_status(403) return relationship = '' type = '' peerid = '' relationship", "== relationship: approved = True else: approved = False # Since we received", "desc) (auth: creator, # admin, or peer secret) # DELETE /trust/{relationship}}/{actorid} to delete", "else: url = '' if 'relationship' in params: relationship = params['relationship'] if 'type'", "params: baseuri = params['baseuri'] else: baseuri = '' if 'id' in params: peerid", "in params: if params['approved'] == True or params['approved'].lower() == \"true\": approved = True", "params: baseuri = params['baseuri'] else: baseuri = '' if 'desc' in params: desc", "actor and another (reciprocal relationship) (auth: only creator and admins allowed) # POST", "method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self, id, relationship): (Config, myself,", "Since we received a request for a relationship, assume that peer has approved", "if 'id' in params: peerid = params['id'] else: peerid = '' if 'type'", "deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted: self.response.set_status(502,", "'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, }) out = json.dumps(pairs) self.response.write(out) self.response.headers[\"Content-Type\"] =", "found\") return # Access is the same as /trust if not check.checkAuthorisation(path='trust', method='DELETE'):", "0 or len(type) == 0: self.response.set_status(400, 'Missing mandatory attributes') return if Config.auto_accept_default_relationship and", "# auth required) # GET /trust/{relationship}}/{actorid} to get details on a specific relationship", "# POST /trust/{relationship}}/{actorid} to send information to a peer about changes in the", "= json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(201, 'Created') # Handling requests to /trust/*,", 
"json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def", "myself.id, 'peerid': new_trust.peerid, 'relationship': new_trust.relationship, 'approved': new_trust.approved, 'peer_approved': new_trust.peer_approved, 'verified': new_trust.verified, 'type': new_trust.type,", "myself: return if relationship != 'trustee': self.response.set_status(404, \"Not found\") return # Access is", "auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself or check.response[\"code\"] != 200: return if", "= '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri, approved=approved, desc=desc): self.response.set_status(204, 'Ok') else: self.response.set_status(405, 'Not", "creator and admins allowed) # POST /trust/{relationship} with json body to create new", "peerid = params['id'] else: peerid = '' if 'type' in params: type =", "from actingweb import trust from actingweb import auth import webapp2 import os from", "self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id, relationship, peerid): (Config, myself, check)", "delete is from the peer) (auth: creator, admin, or # peer secret) #", "or len(peerid) == 0 or len(type) == 0: self.response.set_status(400, 'Missing mandatory attributes') return", "peerid = '' if 'type' in params: type = params['type'] else: type =", "only creator and admins allowed) # POST /trust/{relationship} with json body to create", "secret = '' if 'desc' in params: desc = params['desc'] else: desc =", "if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved = True else: approved = False", "content') def delete(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship,", "no deletion of a peer # relationship even when requestor is not a", "else: # Use of GET param peer=true is a way of forcing no", "'POST': self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not myself", "self.request.get('baseuri') and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else: baseuri = '' if", "'' if 'id' in params: peerid = params['id'] else: peerid = '' if", "or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='GET'): self.response.set_status(403) return relationship =", "== \"true\": approved = True else: approved = None if self.request.get('baseuri') and len(self.request.get('baseuri'))", "check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self, id, relationship): (Config,", "create new trust # relationship (see config.py for default relationship and auto-accept, no", "str(self.request.headers)) (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if not myself or", "desc = params['desc'] else: desc = '' if 'verify' in params: verificationToken =", "params = json.loads(self.request.body.decode('utf-8', 'ignore')) peer_approved = None if 'approved' in params: if params['approved']", "'verified': rel.verified, 'type': rel.type, 'desc': rel.desc, 'secret': rel.secret, }) out = json.dumps(pairs) self.response.write(out)", 
"post(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if", "no # auth required) # GET /trust/{relationship}}/{actorid} to get details on a specific", "= [] for rel in relationships: pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid,", "if not check.checkAuthorisation(path='trust', method='DELETE'): self.response.set_status(403) return myself.deleteProperty('trustee_root') self.response.set_status(204, 'No content') def post(self, id,", "# actor and another (reciprocal relationship) (auth: only creator and admins allowed) #", "out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if approved: self.response.set_status(201, 'Created') else: self.response.set_status(202,", "= relationships[0] if isPeer: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False) else: deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True)", "# GET /trust/{relationship}}/{actorid} to get details on a specific relationship (auth: creator, admin,", "path='trust') if not myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', method='GET'):", "if my_trust.approved: self.response.set_status(200, 'Ok') else: self.response.set_status(202, 'Accepted') def post(self, id, relationship, peerid): (Config,", "= \"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id,", "0: self.response.set_status(400, 'Missing mandatory attributes') return if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved", "!= 200 and check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return # We allow non-approved", "config.py for default relationship and auto-accept, no # auth required) # GET /trust/{relationship}}/{actorid}", "mandatory attributes') return if Config.auto_accept_default_relationship and Config.default_relationship == relationship: approved = True else:", "= params['desc'] except ValueError: url = self.request.get('url') relationship = self.request.get('relationship') type = self.request.get('type')", "'Unable to create trust relationship') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/'", "myself.createReciprocalTrust( url=url, secret=secret, desc=desc, relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable to create", "= Config.newToken() new_trust = myself.createReciprocalTrust( url=url, secret=secret, desc=desc, relationship=relationship, type=type) if not new_trust:", "my_trust.verificationToken else: verificationToken = '' pair = { 'baseuri': my_trust.baseuri, 'id': myself.id, 'peerid':", "pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship, 'approved': rel.approved, 'peer_approved': rel.peer_approved,", "rootHandler(webapp2.RequestHandler): def get(self, id): if self.request.get('_method') == 'POST': self.post(id) return (Config, myself, check)", "= '' if self.request.get('desc') and len(self.request.get('desc')) > 0: desc = self.request.get('desc') else: desc", "'No json content') return if len(baseuri) == 0 or len(peerid) == 0 or", "and check.trust.peerid == peerid: isPeer = True else: # Use of GET param", "not myself or (check.response[\"code\"] != 200 and 
check.response[\"code\"] != 401): auth.add_auth_response(appreq=self, auth_obj=check) return", "content') return if len(trustee_root) > 0: myself.setProperty('trustee_root', trustee_root) if creator: myself.modify(creator=creator) self.response.set_status(204, 'No", "'Accepted') def post(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust',", "/trust/{relationship}}/{actorid} to get details on a specific relationship (auth: creator, admin, or peer", "self.response.set_status(404, 'Not found') return pairs = [] for rel in relationships: pairs.append({ 'baseuri':", "if self.request.get('_method') == 'DELETE': self.delete(id, relationship, peerid) return logging.debug('GET trust headers: ' +", "json body to create new trust # relationship (see config.py for default relationship", "= self.request.get('relationship') type = self.request.get('type') if len(url) == 0: self.response.set_status(400, 'Missing peer URL')", "subpath='<type>/<id>', method='PUT', peerid=peerid): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in", "myself or check.response[\"code\"] != 200: return if not check.checkAuthorisation(path='trust', subpath='<type>/<id>', method='POST', peerid=peerid): self.response.set_status(403)", "= self.request.get('type') if len(url) == 0: self.response.set_status(400, 'Missing peer URL') return secret =", "import os from google.appengine.ext.webapp import template import json import logging import datetime import", "(auth: creator, admin, or peer secret) # POST /trust/{relationship}}/{actorid} to send information to", "+ '/trust/' + new_trust.relationship + \"/\" + new_trust.peerid)) pair = { 'baseuri': new_trust.baseuri,", "my_trust.baseuri, 'id': myself.id, 'peerid': my_trust.peerid, 'relationship': my_trust.relationship, 'approved': my_trust.approved, 'peer_approved': my_trust.peer_approved, 'verified': my_trust.verified,", "myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself: return if", "self.post(id) return (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not myself or", "check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself: return if relationship", "'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if my_trust.approved: self.response.set_status(200,", "relationship) return self.response.set_status(404, \"Not found\") def put(self, id, relationship): (Config, myself, check) =", "desc=desc, relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable to create trust relationship') return", "is a way of forcing no deletion of a peer # relationship even", "deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True) if not deleted: self.response.set_status(502, 'Not able to delete relationship", "purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer = True Config =", "params['desc'] except ValueError: url = self.request.get('url') relationship = self.request.get('relationship') type = self.request.get('type') if", "peer did a GET to verify if check.trust and check.trust.peerid == peerid and", "body to create new trust # relationship (see config.py for default relationship and", "def get(self, id, relationship): if 
self.request.get('_method') == 'POST': self.post(id, relationship) return self.response.set_status(404, \"Not", "if self.request.get('approved').lower() == \"true\": approved = True else: approved = None if self.request.get('baseuri')", "subpath='<type>', method='POST'): self.response.set_status(403) return try: params = json.loads(self.request.body.decode('utf-8', 'ignore')) if 'baseuri' in params:", "not deleted: self.response.set_status(502, 'Not able to delete relationship with peer.') return self.response.set_status(204, 'Ok')", "auth.add_auth_response(appreq=self, auth_obj=check) return # We allow non-approved peers to delete even if we", "json body to change details on a relationship (baseuri, secret, desc) (auth: creator,", "GET to verify if check.trust and check.trust.peerid == peerid and not my_trust.verified: my_trust.modify(verified=True)", "!= \"PUT\": self.response.set_status(400, 'No json content') return if self.request.get('approved') and len(self.request.get('approved')) > 0:", "in params: desc = params['desc'] except ValueError: url = self.request.get('url') relationship = self.request.get('relationship')", "params['baseuri'] else: baseuri = '' if 'id' in params: peerid = params['id'] else:", "= myself.getTrustRelationships( relationship=relationship, peerid=peerid, type=type) if not relationships: self.response.set_status(404, 'Not found') return pairs", "with query parameters (relationship, type, and peerid) to retrieve trust relationships (auth: only", "relationship=relationship, type=type) if not new_trust: self.response.set_status(408, 'Unable to create trust relationship') return self.response.headers.add_header(", "Config.newToken() new_trust = myself.createReciprocalTrust( url=url, secret=secret, desc=desc, relationship=relationship, type=type) if not new_trust: self.response.set_status(408,", "/trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship): if self.request.get('_method') == 'POST': self.post(id, relationship)", "relationship (auth: creator, admin, or peer secret) # POST /trust/{relationship}}/{actorid} to send information", "not new_trust: self.response.set_status(403, 'Forbidden') return self.response.headers.add_header( \"Location\", str(Config.root + myself.id + '/trust/' +", "headers: ' + str(self.request.headers)) (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship) if", "to /trust/*, e.g. 
/trust/friend class relationshipHandler(webapp2.RequestHandler): def get(self, id, relationship): if self.request.get('_method') ==", "actingweb import trust from actingweb import auth import webapp2 import os from google.appengine.ext.webapp", "'id' in params: peerid = params['id'] else: peerid = '' if 'type' in", "not new_trust: self.response.set_status(408, 'Unable to create trust relationship') return self.response.headers.add_header( \"Location\", str(Config.root +", "self.response.set_status(200, 'Ok') def post(self, id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if", "def post(self, id, relationship, peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship)", "relationship) (auth: only creator and admins allowed) # POST /trust/{relationship} with json body", "= params['id'] else: peerid = '' if 'type' in params: type = params['type']", "= params['desc'] else: desc = '' if 'verify' in params: verificationToken = params['verify']", "for testing purposes) peerGet = self.request.get('peer').lower() if peerGet.lower() == \"true\": isPeer = True", "self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" self.response.set_status(200, 'Ok') def post(self, id): (Config, myself, check) =", "peerid): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust', subpath=relationship, add_response=False) if not myself", "json import logging import datetime import time # /trust handlers # # GET", "my_trust.desc, 'secret': my_trust.secret, } out = json.dumps(pair) self.response.write(out) self.response.headers[\"Content-Type\"] = \"application/json\" if my_trust.approved:", "rel in relationships: pairs.append({ 'baseuri': rel.baseuri, 'id': myself.id, 'peerid': rel.peerid, 'relationship': rel.relationship, 'approved':", "params: type = params['type'] if 'desc' in params: desc = params['desc'] except ValueError:", "> 0: desc = self.request.get('desc') else: desc = '' if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, baseuri=baseuri,", "and len(self.request.get('baseuri')) > 0: baseuri = self.request.get('baseuri') else: baseuri = '' if self.request.get('desc')", "id=id, path='trust', subpath=relationship, add_response=False) if not myself: return if not check.checkAuthorisation(path='trust', subpath='<type>', method='POST'):", "the relationship # PUT /trust/{relationship}}/{actorid} with a json body to change details on", "'Ok') def post(self, id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not", "in params: if params['approved'] and params['approved'] == True: peer_approved = True except ValueError:", "= True except ValueError: self.response.set_status(400, 'No json content') return if myself.modifyTrustAndNotify(relationship=relationship, peerid=peerid, peer_approved=peer_approved):", "== 0 or len(peerid) == 0 or len(type) == 0: self.response.set_status(400, 'Missing mandatory", "admins allowed) # POST /trust/{relationship} with json body to create new trust #", "myself.modify(creator=creator) self.response.set_status(204, 'No content') def delete(self, id, relationship): (Config, myself, check) = auth.init_actingweb(appreq=self,", "id): (Config, myself, check) = auth.init_actingweb(appreq=self, id=id, path='trust') if not myself or check.response[\"code\"]", "GET param peer=true is a way of forcing no deletion of a peer", "\"Location\", str(Config.root + myself.id + '/trust/' + 
# Handling of trust relationships between this actor and its peers:
#
# POST /trust with json body to initiate a trust relationship between this
#   actor and another (see config.py for default relationship and auto-accept,
#   no auth required)
# POST /trust/{relationship} with json body to create new trust
#   relationship (see config.py for default relationship and auto-accept,
#   no auth required)
# GET /trust/{relationship}/{actorid} to get details on a relationship
# POST /trust/{relationship}/{actorid} to send information to a peer about
#   changes in the relationship
# PUT /trust/{relationship}/{actorid} with a json body to change details on a
#   relationship (baseuri, secret, desc)
# DELETE /trust/{relationship}/{actorid} to delete a relationship (with
#   ?peer=true if the delete is from the peer)
#
# Each relationship is reported back as a json "pair" of the form:
#
#   {
#       'baseuri': trust.baseuri,
#       'id': myself.id,
#       'peerid': trust.peerid,
#       'relationship': trust.relationship,
#       'approved': trust.approved,
#       'peer_approved': trust.peer_approved,
#       'verified': trust.verified,
#       'type': trust.type,
#   }
#
# On creation the response is 201 Created with Content-Type application/json and a
# Location header of the form <actor root>/trust/<relationship>/<peerid>.


# Handling requests to /trust/*, e.g. /trust/friend
class relationshipHandler(webapp2.RequestHandler):

    def get(self, id, relationship):
        if self.request.get('_method') == 'POST':
            self.post(id, relationship)
            return
        self.response.set_status(404, "Not found")


# Handling requests to /trust/<relationship>/<peerid>
class trustHandler(webapp2.RequestHandler):

    def delete(self, id, relationship, peerid):
        (Config, myself, check) = auth.init_actingweb(appreq=self,
                                                      id=id, path='trust', subpath=relationship)
        if not myself or (check.response["code"] != 200 and check.response["code"] != 401):
            auth.add_auth_response(appreq=self, auth_obj=check)
            return
        if not check.checkAuthorisation(path='trust', subpath='<type>/<id>',
                                        method='DELETE', peerid=peerid, approved=False):
            self.response.set_status(403)
            return
        # ?peer=true signals that the delete request comes from the peer, in which
        # case only this actor's side of the relationship is removed.
        isPeer = False
        if check.trust and check.trust.peerid == peerid:
            isPeer = True
        relationships = myself.getTrustRelationships(
            relationship=relationship, peerid=peerid)
        if not relationships:
            self.response.set_status(404, 'Not found')
            return
        if isPeer:
            deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=False)
        else:
            deleted = myself.deleteReciprocalTrust(peerid=peerid, deletePeer=True)
        if deleted:
            self.response.set_status(204, 'Ok')


application = webapp2.WSGIApplication([
    webapp2.Route(r'/<id>/trust<:/?>', rootHandler, name='rootHandler'),
    webapp2.Route(r'/<id>/trust/<relationship><:/?>', relationshipHandler,
                  name='relationshipHandler'),
    webapp2.Route(r'/<id>/trust/<relationship>/<peerid><:/?>', trustHandler,
                  name='trustHandler'),
])
[ "___ | |_ 110011 00110110 | /| | | ___ \\/ _ \\|", "in the GUI no longer breaks the config. \"\"\", '1.3.0': \"\"\" Accurate ball", "10100 | ___ \\ | | ___ \\ | | 00101 110011 |", "is missing - Loadout configuration is broken Thanks to ccman32 and dtracers for", "now get information about the ball's status in Dropshot mode thanks to hallo_doei!", "Giving specific error messages when cfg files are messed up. \"\"\", '1.2.2': \"\"\"", "configs more effectively. - Fixed bug where RUN button behavior in the GUI", "the postgame. - tarehart - Fixed a bug where bots would dodge when", "about the ball's status in Dropshot mode thanks to hallo_doei! Read all about", "the rendering strategy for 3D lines that go past the camera. Formerly it", "row in the GUI will no longer spawn duplicate processes. \"\"\", '1.1.2': \"\"\"", "Faster way to access ball prediction data in python. - Skyborg - Java", "feature is quite nice for the scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more", "with relative paths for agents. Fixed agent preset loading to allow multiple agents", "pad data is fixed - Loadout configuration is fixed Thanks to ccman32 and", "up. \"\"\", '1.2.2': \"\"\" - Rearranged the GUI a bit, and made it", "missing - Loadout configuration is broken Thanks to ccman32 and dtracers for delivering", "experienced errors related to memory access. The limit is now only double the", "Zaptive - Subprocess agent for future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\"", "it\". - Showing the rate that inputs are received for each player index", "warning seen when using the GUI. - Giving specific error messages when cfg", "stderr. - Dragging bots to another team in the GUI no longer breaks", "Fixed a bug where the GUI would crash with a \"KeyError\". - hallo_doei", "the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces are only accurate", "sometimes\", now it will be \"don't draw it\". - Showing the rate that", "bot without it, but this feature is quite nice for the scientists among", "'1.5.0': \"\"\" Adding a have_internet helper function to help streamline upgrade checks. -", "a great bot without it, but this feature is quite nice for the", "subject to interpolation. You can still make a great bot without it, but", "config. \"\"\", '1.3.0': \"\"\" Accurate ball prediction for Hoops and Dropshot modes! -", "can import it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes", "\"\"\" - We now offer a 'RigidBodyTick' thanks to whatisaphone! It's a lower-level", "Fixed a bug where party_member_bot could get influenced by real controller input. -", "bots. \"\"\", '1.2.0': \"\"\" - We now offer a 'RigidBodyTick' thanks to whatisaphone!", "\"\"\" - New core dll that is less likely to break when Rocket", "release! We actually left \"beta\" a long time ago so this isn't as", "- Showing the rate that inputs are received for each player index when", "button twice in a row in the GUI will no longer spawn duplicate", "to saved/loaded correctly if they have the same name. - ima9rd \"\"\", '1.6.0':\"\"\"", "camera. Formerly it was \"draw it, even though it's crazy sometimes\", now it", "Rocket League 1.49 and RLBot 0.0.30, ask for instructions on discord. 
\"\"\", '0.0.30':", "# https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes = { '1.6.1': \"\"\" Fixed GUI crash", "\"\"\" Adding support for auto-running java bots during tournaments. To take advantage of", "the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei, ccman32,", "provide a list of future ball positions based on chip's excellent physics modeling.", "correctly if they have the same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add support", "than the expected 140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix for dodge cancels", "arena, not hoops or dropshot. Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction", "- ccman32 - Fixed a bug where the GUI would crash with a", "to double jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket League patch broke", "have_internet helper function to help streamline upgrade checks. - ima9rd \"\"\", '1.4.2': \"\"\"", "the config. \"\"\", '1.3.0': \"\"\" Accurate ball prediction for Hoops and Dropshot modes!", "\"\"\", '1.2.5': \"\"\" *************************************************** * Fix for dodge cancels / half flips! -", "bots twice in a row. - Clicking on the \"Run\" button twice in", "for delivering this short-term fix quickly. We will follow this up with a", "\\ | | ___ \\ | | 00101 110011 | |_/ / |", "- Redox - Fancy release notes - tarehart and Skyborg \"\"\" } release_banner", "it. \"\"\", '1.0.5': \"\"\" Maximum size for a render message has been decreased", "- Fancy release notes - tarehart and Skyborg \"\"\" } release_banner = \"\"\"", "down when the python framework quits. This has been necessary recently to avoid", "Skyborg \"\"\" } release_banner = \"\"\" ______ _ ______ _ 10100 | ___", "saving preset then canceling - hallo-doei - Adding file checking before injection (Resolves", "= '1.6.1' release_notes = { '1.6.1': \"\"\" Fixed GUI crash when loading certain", "for 3D lines that go past the camera. Formerly it was \"draw it,", "This has been necessary recently to avoid buggy situations. - Shutting down the", "now appear as 1 instead of 0. - Fixed a crash that would", "- Avoiding and suppressing some game crashes, and also restoring the ability to", "'1.0.3': \"\"\" Time for the big 1.0 release! We actually left \"beta\" a", "to Rocket League patch 1.50 with the following known issues: - Dropshot tile", "al. \"\"\", '1.2.6': \"\"\" Fixed a bug where field info was not extracted", "Shutting down the python framework will no longer attempt to kill bots twice", "= { '1.6.1': \"\"\" Fixed GUI crash when loading certain RLBot config files", "* Fix for dodge cancels / half flips! - ccman32 * *************************************************** Plus:", "not hoops or dropshot. Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code", "config for orange team is now respected again. - ccman32 - Fixed a", "to launch Rocket League when clicking run if no Rocket League process is", "1 instead of 0. - Fixed a crash that would commonly happen after", "\"\"\" Faster way to access ball prediction data in python. - Skyborg \"\"\",", "https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei, ccman32, and tarehart 2. Ball prediction. We now", "/ half flips! 
- ccman32 * *************************************************** Plus: - Changing the rendering strategy", "00110110 | /| | | ___ \\/ _ \\| __| 01101100 010010 |", "can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and tarehart Bonus: -", "Bug fix for people with spaces in their file path by Zaptive -", "https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes = { '1.6.1': \"\"\" Fixed GUI crash when", "commonly happen after a match ends. As a side effect, you can no", "- tarehart and Skyborg \"\"\" } release_banner = \"\"\" ______ _ ______ _", "double the original. \"\"\", '1.0.4': \"\"\" - Maximum size for a render message", "if __version__ in release_notes: return release_notes[__version__] return '' def get_help_text(): return \"Trouble? Ask", "Code written by hallo_doei, ccman32, and tarehart 2. Ball prediction. We now provide", "fix quickly. We will follow this up with a proper fix as soon", "this isn't as big a milestone as the number implies, but we DO", "\"\"\", '1.2.6': \"\"\" Fixed a bug where field info was not extracted properly", "Showing the rate that inputs are received for each player index when you", "increased by a factor of 100. This means you can draw a lot", "patched - ccman32 and hallo-doei - Fixed bug resulting in incorrect quickchat -", "would commonly happen after a match ends. As a side effect, you can", "of this to do next-level wall reads, catches, and dribbles! You can read", "can import it in setup.py for the same reason # 3) we can", "so: # 1) we don't load dependencies by storing it in __init__.py #", "in a row. - Clicking on the \"Run\" button twice in a row", "ball prediction data in python. - Skyborg \"\"\", '1.1.3': \"\"\" - Faster way", "GUI works better now. - Got rid of the libpng warning seen when", "correct path - hallo-doei - Fix for GUI crash when saving preset then", "Setting game state. You can manipulate the position, velocity, etc of the ball", "a bug where bots would dodge when they intended to double jump. -tarehart", "/ | | |_/ / ___ | |_ 110011 00110110 | /| |", "data in python. - Skyborg - Java bots will now shut down when", "and track appearance configs more effectively. - Fixed bug where RUN button behavior", "broken Thanks to ccman32 and dtracers for delivering this short-term fix quickly. We", "- You can now play on Salty Shores thanks to hallo_doei - Bug", "\\/ _ \\| __| 01101100 010010 | |\\ \\| |____| |_/ / (_)", "player index when you press the [home] key. Toggle back off with the", "size for a render message has been increased by a factor of 100.", "relative paths for agents. Fixed agent preset loading to allow multiple agents to", "left \"beta\" a long time ago so this isn't as big a milestone", "game state. You can manipulate the position, velocity, etc of the ball and", "can manipulate the position, velocity, etc of the ball and the cars! This", "- New core dll that is less likely to break when Rocket League", "python. - Skyborg \"\"\", '1.1.1': \"\"\" You can now get information about the", "been decreased again because many people experienced errors related to memory access. The", "off with the [end] key. - Fixed a bug where party_member_bot could get", "Accurate ball prediction for Hoops and Dropshot modes! - Kipje13, Marvin, NeverCast, et.", "auto starting .NET executables. \"\"\", '1.5.1': \"\"\" Fixed crash with GUI when no", "has been necessary recently to avoid buggy situations. 
- Shutting down the python", "and made it load and track appearance configs more effectively. - Fixed bug", "ball prediction data in python. - Skyborg - Java bots will now shut", "suppressing some game crashes, and also restoring the ability to get game tick", "at https://discord.gg/5cNbXgG \" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes(): print(release_banner)", "Ask on Discord at https://discord.gg/5cNbXgG \" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\"", "is now only double the original. \"\"\", '1.0.4': \"\"\" - Maximum size for", "during bot development, and you can also get creative with it. Visit the", "it. Visit the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by", "ball positions based on chip's excellent physics modeling. Take advantage of this to", "Boost amount for cars will now round up to the nearest integer, so", "Plus: - Changing the rendering strategy for 3D lines that go past the", "that go past the camera. Formerly it was \"draw it, even though it's", "real controller input. - Creating new presets in the GUI works better now.", "return '' def get_help_text(): return \"Trouble? Ask on Discord at https://discord.gg/5cNbXgG \" \\", "| | ___ \\ | | 00101 110011 | |_/ / | |", "- Shutting down the python framework will no longer attempt to kill bots", "https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces are only accurate on the standard arena,", "where field info was not extracted properly during dropshot mode. It was reporting", "that is less likely to break when Rocket League is patched - ccman32", "whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix for Rocket League patch 1.50. Compared", "support for auto starting .NET executables. \"\"\", '1.5.1': \"\"\" Fixed crash with GUI", "read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces are", "a row. - Clicking on the \"Run\" button twice in a row in", "is fixed - Boost pad data is fixed - Loadout configuration is fixed", "has been increased by a factor of 100. This means you can draw", "advantage of this to do next-level wall reads, catches, and dribbles! You can", "\\__| 01001 \"\"\" def get_current_release_notes(): if __version__ in release_notes: return release_notes[__version__] return ''", "\"\"\" You can now get information about the ball's status in Dropshot mode", "once without getting errors. - Boost amount for cars will now round up", "attempt to kill bots twice in a row. - Clicking on the \"Run\"", "':' not showing up in the GUI - hallo-doei - Fix for GUI", "function to help streamline upgrade checks. - ima9rd \"\"\", '1.4.2': \"\"\" Adding support", "currently the wall bounces are only accurate on the standard arena, not hoops", "Boost pad data is missing - Loadout configuration is broken Thanks to ccman32", "also choose to stay on Rocket League 1.49 and RLBot 0.0.30, ask for", "your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes = { '1.6.1': \"\"\"", "the GUI no longer breaks the config. \"\"\", '1.3.0': \"\"\" Accurate ball prediction", "Fixed a bug where bots would dodge when they intended to double jump.", "instant replays. \"\"\", '1.0.3': \"\"\" Time for the big 1.0 release! 
We actually", "- Fixed a bug where auto-run executables would crash when trying to write", "RLBot 0.0.30, ask for instructions on discord. \"\"\", '0.0.30': \"\"\" - New core", "crash when trying to write to stderr. - Dragging bots to another team", "a render message has been increased by a factor of 100. This means", "comprehensive fix for Rocket League patch 1.50. Compared to previous version: - Dropshot", "up in the GUI - hallo-doei - Fix for GUI not saving correct", "you press the [home] key. Toggle back off with the [end] key. -", "\"\"\" More comprehensive fix for Rocket League patch 1.50. Compared to previous version:", "lines that go past the camera. Formerly it was \"draw it, even though", "another team in the GUI no longer breaks the config. \"\"\", '1.3.0': \"\"\"", "would dodge when they intended to double jump. -tarehart \"\"\", '1.0.6': \"\"\" The", "are only accurate on the standard arena, not hoops or dropshot. Documentation and", "crash when saving preset then canceling - hallo-doei - Adding file checking before", "Rocket League patch 1.50 with the following known issues: - Dropshot tile data", "a have_internet helper function to help streamline upgrade checks. - ima9rd \"\"\", '1.4.2':", "though it's crazy sometimes\", now it will be \"don't draw it\". - Showing", "on the \"Run\" button twice in a row in the GUI will no", "appearance configs more effectively. - Fixed bug where RUN button behavior in the", "issues: - Dropshot tile data is missing - Boost pad data is missing", "intended to double jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket League patch", "crazy sometimes\", now it will be \"don't draw it\". - Showing the rate", "1. Setting game state. You can manipulate the position, velocity, etc of the", "details! - Faster way to access ball prediction data in python. - Skyborg", "appear as 1 instead of 0. - Fixed a crash that would commonly", "multiple agents to saved/loaded correctly if they have the same name. - ima9rd", "'0.0.31': \"\"\" Rapid response to Rocket League patch 1.50 with the following known", "'1.1.3': \"\"\" - Faster way to access ball prediction data in python. -", "no longer spawn duplicate processes. \"\"\", '1.1.2': \"\"\" Faster way to access ball", "showing up in the GUI - hallo-doei - Fix for GUI not saving", "\"\"\" The latest Rocket League patch broke dodges for our bots; this update", "Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix for Rocket League", "access ball prediction data in python. - Skyborg \"\"\", '1.1.1': \"\"\" You can", "for dodge cancels / half flips! - ccman32 * *************************************************** Plus: - Changing", "- Adding file checking before injection (Resolves #167) - Redox - Fixed typo", "using the GUI. - Giving specific error messages when cfg files are messed", "- Added more built-in colors to the python rendering manager - Eastvillage -", "You can now get information about the ball's status in Dropshot mode thanks", "now shut down when the python framework quits. This has been necessary recently", "ask for instructions on discord. \"\"\", '0.0.30': \"\"\" - New core dll that", "You can manipulate the position, velocity, etc of the ball and the cars!", "* *************************************************** Plus: - Changing the rendering strategy for 3D lines that go", "- Fixed typo in rlbot.cfg - Redox - Fancy release notes - tarehart", "Skyborg \"\"\", '1.1.1': \"\"\" You can now get information about the ball's status", "to interpolation. 
You can still make a great bot without it, but this", "about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces are only", "cancels / half flips! - ccman32 * *************************************************** Plus: - Changing the rendering", "Updated GUI to launch Rocket League when clicking run if no Rocket League", "python framework will no longer attempt to kill bots twice in a row.", "the [home] key. Toggle back off with the [end] key. - Fixed a", "for the same reason # 3) we can import it into your module", "so this isn't as big a milestone as the number implies, but we", "Fixed crash with GUI when no default RLBot.cfg file was found. Updated GUI", "game crashes, and also restoring the ability to get game tick data during", "- ima9rd \"\"\", '1.6.0':\"\"\" Add support for auto starting .NET executables. \"\"\", '1.5.1':", "- Dragging bots to another team in the GUI no longer breaks the", "time ago so this isn't as big a milestone as the number implies,", "bug fixes: - Fixed a bug where auto-run executables would crash when trying", "follow this up with a proper fix as soon as possible. You may", "now provide a list of future ball positions based on chip's excellent physics", "python. - Skyborg - Java bots will now shut down when the python", "it's crazy sometimes\", now it will be \"don't draw it\". - Showing the", "by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix for Rocket League patch 1.50.", "now only double the original. \"\"\", '1.0.4': \"\"\" - Maximum size for a", "Boost pad data is fixed - Loadout configuration is fixed Thanks to ccman32", "quickchat - dtracers - Added more built-in colors to the python rendering manager", "factor of 100. This means you can draw a lot of lines at", "'0.0.30': \"\"\" - New core dll that is less likely to break when", "size for a render message has been decreased again because many people experienced", "RUN button behavior in the GUI would not work after killing bots. \"\"\",", "bug resulting in incorrect quickchat - dtracers - Added more built-in colors to", "after killing bots. \"\"\", '1.2.0': \"\"\" - We now offer a 'RigidBodyTick' thanks", "GUI a bit, and made it load and track appearance configs more effectively.", "can read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces", "[end] key. - Fixed a bug where party_member_bot could get influenced by real", "that inputs are received for each player index when you press the [home]", "hallo_doei - Avoiding and suppressing some game crashes, and also restoring the ability", "\"\"\" } release_banner = \"\"\" ______ _ ______ _ 10100 | ___ \\", "features! 1. Setting game state. You can manipulate the position, velocity, etc of", "agent for future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix", "dribbles! You can read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the", "Java bots will now shut down when the python framework quits. 
This has", "__init__.py # 2) we can import it in setup.py for the same reason", "\"\"\" - Maximum size for a render message has been increased by a", "details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei, ccman32, and tarehart 2.", "all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout config for orange", "Thanks to ccman32 and dtracers for delivering this fix quickly! \"\"\", '0.0.31': \"\"\"", "and you can also get creative with it. Visit the wiki for details", "when the python framework quits. This has been necessary recently to avoid buggy", "Loadout configuration is fixed Thanks to ccman32 and dtracers for delivering this fix", "- hallo-doei - Adding file checking before injection (Resolves #167) - Redox -", "'1.0.6': \"\"\" The latest Rocket League patch broke dodges for our bots; this", "upgrade checks. - ima9rd \"\"\", '1.4.2': \"\"\" Adding support for auto-running java bots", "duplicate processes. \"\"\", '1.1.2': \"\"\" Faster way to access ball prediction data in", "no Rocket League process is found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding a", "spaces in their file path by Zaptive - Subprocess agent for future Rust", "by chip and tarehart Bonus: - You can now play on Salty Shores", "for future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix for", "for Hoops and Dropshot modes! - Kipje13, Marvin, NeverCast, et. al. \"\"\", '1.2.6':", "a match ends. As a side effect, you can no longer see up-to-date", "it, even though it's crazy sometimes\", now it will be \"don't draw it\".", "here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces are only accurate on the standard", "it in __init__.py # 2) we can import it in setup.py for the", "hallo-doei - Fixed bug resulting in incorrect quickchat - dtracers - Added more", "configuration is fixed Thanks to ccman32 and dtracers for delivering this fix quickly!", "- Rearranged the GUI a bit, and made it load and track appearance", "} release_banner = \"\"\" ______ _ ______ _ 10100 | ___ \\ |", "people experienced errors related to memory access. The limit is now only double", "key. Toggle back off with the [end] key. - Fixed a bug where", "Dropshot modes! - Kipje13, Marvin, NeverCast, et. al. \"\"\", '1.2.6': \"\"\" Fixed a", "the big 1.0 release! We actually left \"beta\" a long time ago so", "in the GUI would not work after killing bots. \"\"\", '1.2.0': \"\"\" -", "The loadout config for orange team is now respected again. - ccman32 -", "___ \\/ _ \\| __| 01101100 010010 | |\\ \\| |____| |_/ /", "after a match ends. As a side effect, you can no longer see", "version here so: # 1) we don't load dependencies by storing it in", "written by hallo_doei, ccman32, and tarehart 2. Ball prediction. We now provide a", "__| 01101100 010010 | |\\ \\| |____| |_/ / (_) | |_ 010010", "info was not extracted properly during dropshot mode. It was reporting 2 goals", "Shores thanks to hallo_doei - Bug fix for people with spaces in their", "\"draw it, even though it's crazy sometimes\", now it will be \"don't draw", "longer see up-to-date player data during instant replays. \"\"\", '1.0.3': \"\"\" Time for", "crash with a \"KeyError\". - hallo_doei - Avoiding and suppressing some game crashes,", "\"\"\" Maximum size for a render message has been decreased again because many", "tarehart Bonus: - You can now play on Salty Shores thanks to hallo_doei", "dodge cancels / half flips! 
- ccman32 * *************************************************** Plus: - Changing the", "dropshot. Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip", "in python. - Skyborg \"\"\", '1.1.1': \"\"\" You can now get information about", "_ \\| __| 01101100 010010 | |\\ \\| |____| |_/ / (_) |", "- ima9rd \"\"\", '1.5.0': \"\"\" Adding a have_internet helper function to help streamline", "can no longer see up-to-date player data during instant replays. \"\"\", '1.0.3': \"\"\"", ".NET executables. \"\"\", '1.5.1': \"\"\" Fixed crash with GUI when no default RLBot.cfg", "the camera. Formerly it was \"draw it, even though it's crazy sometimes\", now", "and dtracers for delivering this short-term fix quickly. We will follow this up", "bots to another team in the GUI no longer breaks the config. \"\"\",", "- Fixed a bug where the GUI would crash with a \"KeyError\". -", "patch 1.50 with the following known issues: - Dropshot tile data is missing", "\"\"\", '1.0.4': \"\"\" - Maximum size for a render message has been increased", "position, velocity, etc of the ball and the cars! This can be a", "| ___ \\ | | 00101 110011 | |_/ / | | |_/", "|____| |_/ / (_) | |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001", "standard arena, not hoops or dropshot. Documentation and examples can be found here:", "Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and", "is now respected again. - ccman32 - Fixed a bug where the GUI", "a crash that would commonly happen after a match ends. As a side", "so 0.3% boost will now appear as 1 instead of 0. - Fixed", "have two great new features! 1. Setting game state. You can manipulate the", "be a great help during bot development, and you can also get creative", "- Boost pad data is fixed - Loadout configuration is fixed Thanks to", "_ 10100 | ___ \\ | | ___ \\ | | 00101 110011", "bug where party_member_bot could get influenced by real controller input. - Creating new", "saving correct path - hallo-doei - Fix for GUI crash when saving preset", "isn't as big a milestone as the number implies, but we DO have", "(_) | |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\" def get_current_release_notes():", "we can import it in setup.py for the same reason # 3) we", "the libpng warning seen when using the GUI. - Giving specific error messages", "'1.5.1': \"\"\" Fixed crash with GUI when no default RLBot.cfg file was found.", "with a \"KeyError\". - hallo_doei - Avoiding and suppressing some game crashes, and", "of the ball and the cars! This can be a great help during", "a side effect, you can no longer see up-to-date player data during instant", "it load and track appearance configs more effectively. - Fixed bug where RUN", "found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and tarehart Bonus: - You can", "way to access ball prediction data in python. - Skyborg \"\"\", '1.1.3': \"\"\"", "110011 | |_/ / | | |_/ / ___ | |_ 110011 00110110", "wall reads, catches, and dribbles! You can read about the math involved here:", "if they have the same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add support for", "to previous version: - Dropshot tile data is fixed - Boost pad data", "the standard arena, not hoops or dropshot. 
Documentation and examples can be found", "\"\"\" Fixed GUI crash when loading certain RLBot config files with relative paths", "with GUI when no default RLBot.cfg file was found. Updated GUI to launch", "pad data is missing - Loadout configuration is broken Thanks to ccman32 and", "- Dropshot tile data is missing - Boost pad data is missing -", "saved/loaded correctly if they have the same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add", "by a factor of 100. This means you can draw a lot of", "- Kipje13, Marvin, NeverCast, et. al. \"\"\", '1.2.6': \"\"\" Fixed a bug where", "et. al. \"\"\", '1.2.6': \"\"\" Fixed a bug where field info was not", "data is fixed - Loadout configuration is fixed Thanks to ccman32 and dtracers", "when trying to write to stderr. - Dragging bots to another team in", "https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed a bug where auto-run executables would crash", "no longer see up-to-date player data during instant replays. \"\"\", '1.0.3': \"\"\" Time", "preset then canceling - hallo-doei - Adding file checking before injection (Resolves #167)", "physics data which updates at 120Hz and is not subject to interpolation. You", "prediction for Hoops and Dropshot modes! - Kipje13, Marvin, NeverCast, et. al. \"\"\",", "can be a great help during bot development, and you can also get", "still make a great bot without it, but this feature is quite nice", "01101100 010010 | |\\ \\| |____| |_/ / (_) | |_ 010010 10010", "010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\" def get_current_release_notes(): if __version__ in", "2) we can import it in setup.py for the same reason # 3)", "where RUN button behavior in the GUI would not work after killing bots.", "the \"Run\" button twice in a row in the GUI will no longer", "would crash when trying to write to stderr. - Dragging bots to another", "- Changing the rendering strategy for 3D lines that go past the camera.", "for the big 1.0 release! We actually left \"beta\" a long time ago", "get game tick data during replays and the postgame. - tarehart - Fixed", "\"\"\", '1.0.6': \"\"\" The latest Rocket League patch broke dodges for our bots;", "auto-run executables would crash when trying to write to stderr. - Dragging bots", "\"\"\", '1.1.3': \"\"\" - Faster way to access ball prediction data in python.", "with a ':' not showing up in the GUI - hallo-doei - Fix", "in incorrect quickchat - dtracers - Added more built-in colors to the python", "- Creating new presets in the GUI works better now. - Got rid", "version: - Dropshot tile data is fixed - Boost pad data is fixed", "'0.0.32': \"\"\" More comprehensive fix for Rocket League patch 1.50. Compared to previous", "Discord at https://discord.gg/5cNbXgG \" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes():", "in the GUI - hallo-doei - Fix for GUI not saving correct path", "interpolation. You can still make a great bot without it, but this feature", "missing - Boost pad data is missing - Loadout configuration is broken Thanks", "League when clicking run if no Rocket League process is found. - ima9rd", "- Skyborg \"\"\", '1.1.3': \"\"\" - Faster way to access ball prediction data", "update fixes it. \"\"\", '1.0.5': \"\"\" Maximum size for a render message has", "\"\"\", '0.0.31': \"\"\" Rapid response to Rocket League patch 1.50 with the following", "- Skyborg - Java bots will now shut down when the python framework", "- Faster way to access ball prediction data in python. 
- Skyborg \"\"\",", "data is missing - Boost pad data is missing - Loadout configuration is", "- Fix for GUI crash when saving preset then canceling - hallo-doei -", "no default RLBot.cfg file was found. Updated GUI to launch Rocket League when", "is not subject to interpolation. You can still make a great bot without", "release_notes: return release_notes[__version__] return '' def get_help_text(): return \"Trouble? Ask on Discord at", "3D lines that go past the camera. Formerly it was \"draw it, even", "by real controller input. - Creating new presets in the GUI works better", "to ccman32 and dtracers for delivering this short-term fix quickly. We will follow", "made it load and track appearance configs more effectively. - Fixed bug where", "Other changes: - The loadout config for orange team is now respected again.", "be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and tarehart Bonus: - You", "are received for each player index when you press the [home] key. Toggle", "release_banner = \"\"\" ______ _ ______ _ 10100 | ___ \\ | |", "for more details! - Faster way to access ball prediction data in python.", "RLBot.cfg file was found. Updated GUI to launch Rocket League when clicking run", "loading to allow multiple agents to saved/loaded correctly if they have the same", "name. - ima9rd \"\"\", '1.6.0':\"\"\" Add support for auto starting .NET executables. \"\"\",", "and Skyborg \"\"\" } release_banner = \"\"\" ______ _ ______ _ 10100 |", "a bug where field info was not extracted properly during dropshot mode. It", "- hallo-doei - Fix for GUI not saving correct path - hallo-doei -", "GUI when no default RLBot.cfg file was found. Updated GUI to launch Rocket", "'1.0.5': \"\"\" Maximum size for a render message has been decreased again because", "this short-term fix quickly. We will follow this up with a proper fix", "- Faster way to access ball prediction data in python. - Skyborg -", "file was found. Updated GUI to launch Rocket League when clicking run if", "GUI to launch Rocket League when clicking run if no Rocket League process", "catches, and dribbles! You can read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note:", "reporting 2 goals rather than the expected 140. \"\"\", '1.2.5': \"\"\" *************************************************** *", "creative with it. Visit the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code", "number implies, but we DO have two great new features! 1. Setting game", "java bots during tournaments. To take advantage of this in your bot, see", "League 1.49 and RLBot 0.0.30, ask for instructions on discord. \"\"\", '0.0.30': \"\"\"", "- Boost amount for cars will now round up to the nearest integer,", "a lot of lines at once without getting errors. - Boost amount for", "canceling - hallo-doei - Adding file checking before injection (Resolves #167) - Redox", "- Loadout configuration is broken Thanks to ccman32 and dtracers for delivering this", "new features! 1. Setting game state. You can manipulate the position, velocity, etc", "also get creative with it. Visit the wiki for details and documentation -", "#167) - Redox - Fixed typo in rlbot.cfg - Redox - Fancy release", "instructions on discord. 
\"\"\", '0.0.30': \"\"\" - New core dll that is less", "future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix for Rocket", "by Zaptive - Subprocess agent for future Rust support by whatisaphone \"\"\", '0.0.32':", "row. - Clicking on the \"Run\" button twice in a row in the", "killing bots. \"\"\", '1.2.0': \"\"\" - We now offer a 'RigidBodyTick' thanks to", "We now provide a list of future ball positions based on chip's excellent", "\\| __| 01101100 010010 | |\\ \\| |____| |_/ / (_) | |_", "clicking run if no Rocket League process is found. - ima9rd \"\"\", '1.5.0':", "a render message has been decreased again because many people experienced errors related", "in __init__.py # 2) we can import it in setup.py for the same", "- Fixed bug where RUN button behavior in the GUI would not work", "\"\"\", '1.2.2': \"\"\" - Rearranged the GUI a bit, and made it load", "button behavior in the GUI would not work after killing bots. \"\"\", '1.2.0':", "rather than the expected 140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix for dodge", "more effectively. - Fixed bug where RUN button behavior in the GUI would", "this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed a bug", "dependencies by storing it in __init__.py # 2) we can import it in", "to access ball prediction data in python. - Skyborg \"\"\", '1.1.1': \"\"\" You", "or dropshot. Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by", "not showing up in the GUI - hallo-doei - Fix for GUI not", "data during replays and the postgame. - tarehart - Fixed a bug where", "means you can draw a lot of lines at once without getting errors.", "only accurate on the standard arena, not hoops or dropshot. Documentation and examples", "scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! - Faster way to access", "https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and tarehart Bonus: - You can now play", "fixed - Boost pad data is fixed - Loadout configuration is fixed Thanks", "new presets in the GUI works better now. - Got rid of the", "\"\"\", '1.5.1': \"\"\" Fixed crash with GUI when no default RLBot.cfg file was", "for items with a ':' not showing up in the GUI - hallo-doei", "we can import it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1'", "You can read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall", "release notes - tarehart and Skyborg \"\"\" } release_banner = \"\"\" ______ _", "the GUI - hallo-doei - Fix for GUI not saving correct path -", "nice for the scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! - Faster", "a list of future ball positions based on chip's excellent physics modeling. Take", "track appearance configs more effectively. - Fixed bug where RUN button behavior in", "___ \\ | | ___ \\ | | 00101 110011 | |_/ /", "notes - tarehart and Skyborg \"\"\" } release_banner = \"\"\" ______ _ ______", "but this feature is quite nice for the scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick", "again. 
- ccman32 - Fixed a bug where the GUI would crash with", "the version here so: # 1) we don't load dependencies by storing it", "on Salty Shores thanks to hallo_doei - Bug fix for people with spaces", "it will be \"don't draw it\". - Showing the rate that inputs are", "us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! - Faster way to access ball prediction", "in python. - Skyborg - Java bots will now shut down when the", "|_/ / ___ | |_ 110011 00110110 | /| | | ___ \\/", "data in python. - Skyborg \"\"\", '1.1.3': \"\"\" - Faster way to access", "original. \"\"\", '1.0.4': \"\"\" - Maximum size for a render message has been", "\"\"\", '1.1.2': \"\"\" Faster way to access ball prediction data in python. -", "Fix for dodge cancels / half flips! - ccman32 * *************************************************** Plus: -", "\"\"\" - Faster way to access ball prediction data in python. - Skyborg", "- Fixed a bug where bots would dodge when they intended to double", "and Dropshot modes! - Kipje13, Marvin, NeverCast, et. al. \"\"\", '1.2.6': \"\"\" Fixed", "cars! This can be a great help during bot development, and you can", "More comprehensive fix for Rocket League patch 1.50. Compared to previous version: -", "loadout config for orange team is now respected again. - ccman32 - Fixed", "data is missing - Loadout configuration is broken Thanks to ccman32 and dtracers", "League patch 1.50. Compared to previous version: - Dropshot tile data is fixed", "hallo_doei - Bug fix for people with spaces in their file path by", "a row in the GUI will no longer spawn duplicate processes. \"\"\", '1.1.2':", "Hoops and Dropshot modes! - Kipje13, Marvin, NeverCast, et. al. \"\"\", '1.2.6': \"\"\"", "'1.6.1' release_notes = { '1.6.1': \"\"\" Fixed GUI crash when loading certain RLBot", "better now. - Got rid of the libpng warning seen when using the", "help streamline upgrade checks. - ima9rd \"\"\", '1.4.2': \"\"\" Adding support for auto-running", "module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes = { '1.6.1': \"\"\" Fixed", "to the python rendering manager - Eastvillage - Fix for items with a", "|_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\" def get_current_release_notes(): if __version__", "\"Run\" button twice in a row in the GUI will no longer spawn", "| |_/ / ___ | |_ 110011 00110110 | /| | | ___", "bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed a bug where auto-run executables", "trying to write to stderr. - Dragging bots to another team in the", "changes: - The loadout config for orange team is now respected again. -", "Note: currently the wall bounces are only accurate on the standard arena, not", "it in setup.py for the same reason # 3) we can import it", "incorrect quickchat - dtracers - Added more built-in colors to the python rendering", "now play on Salty Shores thanks to hallo_doei - Bug fix for people", "game tick data during replays and the postgame. - tarehart - Fixed a", "the same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add support for auto starting .NET", "the GUI would not work after killing bots. \"\"\", '1.2.0': \"\"\" - We", "def get_help_text(): return \"Trouble? Ask on Discord at https://discord.gg/5cNbXgG \" \\ \"or report", "for Rocket League patch 1.50. Compared to previous version: - Dropshot tile data", "to whatisaphone! 
It's a lower-level representation of physics data which updates at 120Hz", "team in the GUI no longer breaks the config. \"\"\", '1.3.0': \"\"\" Accurate", "Added more built-in colors to the python rendering manager - Eastvillage - Fix", "- ccman32 * *************************************************** Plus: - Changing the rendering strategy for 3D lines", "related to memory access. The limit is now only double the original. \"\"\",", "python rendering manager - Eastvillage - Fix for items with a ':' not", "hallo-doei - Fix for GUI crash when saving preset then canceling - hallo-doei", "to do next-level wall reads, catches, and dribbles! You can read about the", "tile data is missing - Boost pad data is missing - Loadout configuration", "no longer breaks the config. \"\"\", '1.3.0': \"\"\" Accurate ball prediction for Hoops", "examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and tarehart Bonus:", "the same reason # 3) we can import it into your module module", "been increased by a factor of 100. This means you can draw a", "core dll that is less likely to break when Rocket League is patched", "Fix for items with a ':' not showing up in the GUI -", "shut down when the python framework quits. This has been necessary recently to", "same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add support for auto starting .NET executables.", "our bots; this update fixes it. \"\"\", '1.0.5': \"\"\" Maximum size for a", "player data during instant replays. \"\"\", '1.0.3': \"\"\" Time for the big 1.0", "of the libpng warning seen when using the GUI. - Giving specific error", "|_ 110011 00110110 | /| | | ___ \\/ _ \\| __| 01101100", "def get_current_release_notes(): if __version__ in release_notes: return release_notes[__version__] return '' def get_help_text(): return", "and the postgame. - tarehart - Fixed a bug where bots would dodge", "module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes = { '1.6.1': \"\"\" Fixed GUI", "Plus bug fixes: - Fixed a bug where auto-run executables would crash when", "of 0. - Fixed a crash that would commonly happen after a match", "response to Rocket League patch 1.50 with the following known issues: - Dropshot", "tile data is fixed - Boost pad data is fixed - Loadout configuration", "and hallo-doei - Fixed bug resulting in incorrect quickchat - dtracers - Added", "get_current_release_notes(): if __version__ in release_notes: return release_notes[__version__] return '' def get_help_text(): return \"Trouble?", "GUI would crash with a \"KeyError\". - hallo_doei - Avoiding and suppressing some", "for each player index when you press the [home] key. Toggle back off", "the python framework will no longer attempt to kill bots twice in a", "Rocket League patch 1.50. Compared to previous version: - Dropshot tile data is", "dodges for our bots; this update fixes it. \"\"\", '1.0.5': \"\"\" Maximum size", "launch Rocket League when clicking run if no Rocket League process is found.", "reads, catches, and dribbles! You can read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/", "Rocket League is patched - ccman32 and hallo-doei - Fixed bug resulting in", "restoring the ability to get game tick data during replays and the postgame.", "not work after killing bots. \"\"\", '1.2.0': \"\"\" - We now offer a", "to help streamline upgrade checks. 
- ima9rd \"\"\", '1.4.2': \"\"\" Adding support for", "amount for cars will now round up to the nearest integer, so 0.3%", "when using the GUI. - Giving specific error messages when cfg files are", "You may also choose to stay on Rocket League 1.49 and RLBot 0.0.30,", "_ ______ _ 10100 | ___ \\ | | ___ \\ | |", "| |_ 110011 00110110 | /| | | ___ \\/ _ \\| __|", "Dragging bots to another team in the GUI no longer breaks the config.", "| 00101 110011 | |_/ / | | |_/ / ___ | |_", "when no default RLBot.cfg file was found. Updated GUI to launch Rocket League", "Faster way to access ball prediction data in python. - Skyborg \"\"\", '1.1.3':", "\"KeyError\". - hallo_doei - Avoiding and suppressing some game crashes, and also restoring", "and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and tarehart", "this feature is quite nice for the scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for", "GUI - hallo-doei - Fix for GUI not saving correct path - hallo-doei", "errors. - Boost amount for cars will now round up to the nearest", "where party_member_bot could get influenced by real controller input. - Creating new presets", "manipulate the position, velocity, etc of the ball and the cars! This can", "delivering this fix quickly! \"\"\", '0.0.31': \"\"\" Rapid response to Rocket League patch", "to access ball prediction data in python. - Skyborg \"\"\", '1.1.3': \"\"\" -", "buggy situations. - Shutting down the python framework will no longer attempt to", "Fixed agent preset loading to allow multiple agents to saved/loaded correctly if they", "- Fix for items with a ':' not showing up in the GUI", "The latest Rocket League patch broke dodges for our bots; this update fixes", "the [end] key. - Fixed a bug where party_member_bot could get influenced by", "0. - Fixed a crash that would commonly happen after a match ends.", "checks. - ima9rd \"\"\", '1.4.2': \"\"\" Adding support for auto-running java bots during", "process is found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding a have_internet helper function", "longer breaks the config. \"\"\", '1.3.0': \"\"\" Accurate ball prediction for Hoops and", "Compared to previous version: - Dropshot tile data is fixed - Boost pad", "in Dropshot mode thanks to hallo_doei! Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other", "is broken Thanks to ccman32 and dtracers for delivering this short-term fix quickly.", "message has been increased by a factor of 100. This means you can", "get information about the ball's status in Dropshot mode thanks to hallo_doei! Read", "crash when loading certain RLBot config files with relative paths for agents. Fixed", "\"\"\" def get_current_release_notes(): if __version__ in release_notes: return release_notes[__version__] return '' def get_help_text():", "- Bug fix for people with spaces in their file path by Zaptive", "where the GUI would crash with a \"KeyError\". - hallo_doei - Avoiding and", "Redox - Fancy release notes - tarehart and Skyborg \"\"\" } release_banner =", "strategy for 3D lines that go past the camera. Formerly it was \"draw", "soon as possible. You may also choose to stay on Rocket League 1.49", "prediction data in python. - Skyborg \"\"\", '1.1.1': \"\"\" You can now get", "[home] key. Toggle back off with the [end] key. - Fixed a bug", "patch 1.50. 
Compared to previous version: - Dropshot tile data is fixed -", "thanks to hallo_doei - Bug fix for people with spaces in their file", "following known issues: - Dropshot tile data is missing - Boost pad data", "Fixed bug where RUN button behavior in the GUI would not work after", "\"\"\" Rapid response to Rocket League patch 1.50 with the following known issues:", "\"don't draw it\". - Showing the rate that inputs are received for each", "in setup.py for the same reason # 3) we can import it into", "during replays and the postgame. - tarehart - Fixed a bug where bots", "Take advantage of this to do next-level wall reads, catches, and dribbles! You", "they have the same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add support for auto", "fixed Thanks to ccman32 and dtracers for delivering this fix quickly! \"\"\", '0.0.31':", "to hallo_doei! Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout", "__version__ in release_notes: return release_notes[__version__] return '' def get_help_text(): return \"Trouble? Ask on", "make a great bot without it, but this feature is quite nice for", "latest Rocket League patch broke dodges for our bots; this update fixes it.", "It was reporting 2 goals rather than the expected 140. \"\"\", '1.2.5': \"\"\"", "00101 110011 | |_/ / | | |_/ / ___ | |_ 110011", "'1.2.0': \"\"\" - We now offer a 'RigidBodyTick' thanks to whatisaphone! It's a", "do next-level wall reads, catches, and dribbles! You can read about the math", "can now get information about the ball's status in Dropshot mode thanks to", "bug where auto-run executables would crash when trying to write to stderr. -", "have the same name. - ima9rd \"\"\", '1.6.0':\"\"\" Add support for auto starting", "for auto starting .NET executables. \"\"\", '1.5.1': \"\"\" Fixed crash with GUI when", "Adding a have_internet helper function to help streamline upgrade checks. - ima9rd \"\"\",", "help during bot development, and you can also get creative with it. Visit", "Rocket League when clicking run if no Rocket League process is found. -", "twice in a row. - Clicking on the \"Run\" button twice in a", "the nearest integer, so 0.3% boost will now appear as 1 instead of", "to the nearest integer, so 0.3% boost will now appear as 1 instead", "/| | | ___ \\/ _ \\| __| 01101100 010010 | |\\ \\|", "field info was not extracted properly during dropshot mode. It was reporting 2", "fixes: - Fixed a bug where auto-run executables would crash when trying to", "long time ago so this isn't as big a milestone as the number", "# 2) we can import it in setup.py for the same reason #", "errors related to memory access. The limit is now only double the original.", "Creating new presets in the GUI works better now. - Got rid of", "in rlbot.cfg - Redox - Fancy release notes - tarehart and Skyborg \"\"\"", "to access ball prediction data in python. - Skyborg - Java bots will", "|_/ / (_) | |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\"", "a lower-level representation of physics data which updates at 120Hz and is not", "path by Zaptive - Subprocess agent for future Rust support by whatisaphone \"\"\",", "- Subprocess agent for future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More", "for the scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! - Faster way", "many people experienced errors related to memory access. 
The limit is now only", "import it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes =", "\\_| \\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\" def get_current_release_notes(): if __version__ in release_notes: return", "the number implies, but we DO have two great new features! 1. Setting", "- Got rid of the libpng warning seen when using the GUI. -", "the GUI will no longer spawn duplicate processes. \"\"\", '1.1.2': \"\"\" Faster way", "for a render message has been increased by a factor of 100. This", "where auto-run executables would crash when trying to write to stderr. - Dragging", "they intended to double jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket League", "on discord. \"\"\", '0.0.30': \"\"\" - New core dll that is less likely", "this to do next-level wall reads, catches, and dribbles! You can read about", "01001 \"\"\" def get_current_release_notes(): if __version__ in release_notes: return release_notes[__version__] return '' def", "dtracers for delivering this short-term fix quickly. We will follow this up with", "messages when cfg files are messed up. \"\"\", '1.2.2': \"\"\" - Rearranged the", "bug where RUN button behavior in the GUI would not work after killing", "cfg files are messed up. \"\"\", '1.2.2': \"\"\" - Rearranged the GUI a", "agents to saved/loaded correctly if they have the same name. - ima9rd \"\"\",", "tarehart - Fixed a bug where bots would dodge when they intended to", "actually left \"beta\" a long time ago so this isn't as big a", "longer attempt to kill bots twice in a row. - Clicking on the", "the GUI would crash with a \"KeyError\". - hallo_doei - Avoiding and suppressing", "replays. \"\"\", '1.0.3': \"\"\" Time for the big 1.0 release! We actually left", "as big a milestone as the number implies, but we DO have two", "more details! - Faster way to access ball prediction data in python. -", "even though it's crazy sometimes\", now it will be \"don't draw it\". -", "a ':' not showing up in the GUI - hallo-doei - Fix for", "| | 00101 110011 | |_/ / | | |_/ / ___ |", "getting errors. - Boost amount for cars will now round up to the", "\"\"\", '1.0.3': \"\"\" Time for the big 1.0 release! We actually left \"beta\"", "effectively. - Fixed bug where RUN button behavior in the GUI would not", "modes! - Kipje13, Marvin, NeverCast, et. al. \"\"\", '1.2.6': \"\"\" Fixed a bug", "in release_notes: return release_notes[__version__] return '' def get_help_text(): return \"Trouble? Ask on Discord", "bounces are only accurate on the standard arena, not hoops or dropshot. Documentation", "can still make a great bot without it, but this feature is quite", "hallo_doei! Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout config", "100. This means you can draw a lot of lines at once without", "ccman32 * *************************************************** Plus: - Changing the rendering strategy for 3D lines that", "1.50. Compared to previous version: - Dropshot tile data is fixed - Boost", "with the [end] key. - Fixed a bug where party_member_bot could get influenced", "Ball prediction. We now provide a list of future ball positions based on", "memory access. The limit is now only double the original. \"\"\", '1.0.4': \"\"\"", "a long time ago so this isn't as big a milestone as the", "Fix for GUI not saving correct path - hallo-doei - Fix for GUI", "was not extracted properly during dropshot mode. 
It was reporting 2 goals rather", "Time for the big 1.0 release! We actually left \"beta\" a long time", "less likely to break when Rocket League is patched - ccman32 and hallo-doei", "avoid buggy situations. - Shutting down the python framework will no longer attempt", "As a side effect, you can no longer see up-to-date player data during", "positions based on chip's excellent physics modeling. Take advantage of this to do", "received for each player index when you press the [home] key. Toggle back", "orange team is now respected again. - ccman32 - Fixed a bug where", "not extracted properly during dropshot mode. It was reporting 2 goals rather than", "now round up to the nearest integer, so 0.3% boost will now appear", "- Fixed a crash that would commonly happen after a match ends. As", "a proper fix as soon as possible. You may also choose to stay", "\"Trouble? Ask on Discord at https://discord.gg/5cNbXgG \" \\ \"or report an issue at", "You can still make a great bot without it, but this feature is", "ends. As a side effect, you can no longer see up-to-date player data", "same reason # 3) we can import it into your module module #", "|\\ \\| |____| |_/ / (_) | |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/", "great bot without it, but this feature is quite nice for the scientists", "path - hallo-doei - Fix for GUI crash when saving preset then canceling", "\"\"\", '0.0.32': \"\"\" More comprehensive fix for Rocket League patch 1.50. Compared to", "2. Ball prediction. We now provide a list of future ball positions based", "140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix for dodge cancels / half flips!", "as the number implies, but we DO have two great new features! 1.", "it was \"draw it, even though it's crazy sometimes\", now it will be", "and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei, ccman32, and tarehart 2. Ball", "crash with GUI when no default RLBot.cfg file was found. Updated GUI to", "quite nice for the scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! -", "crashes, and also restoring the ability to get game tick data during replays", "\"\"\" Fixed a bug where field info was not extracted properly during dropshot", "information about the ball's status in Dropshot mode thanks to hallo_doei! Read all", "at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout config for orange team is now", "side effect, you can no longer see up-to-date player data during instant replays.", "break when Rocket League is patched - ccman32 and hallo-doei - Fixed bug", "rendering strategy for 3D lines that go past the camera. Formerly it was", "- ccman32 and hallo-doei - Fixed bug resulting in incorrect quickchat - dtracers", "- https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei, ccman32, and tarehart 2. Ball prediction. We", "Fix for GUI crash when saving preset then canceling - hallo-doei - Adding", "RLBot config files with relative paths for agents. Fixed agent preset loading to", "behavior in the GUI would not work after killing bots. \"\"\", '1.2.0': \"\"\"", "Skyborg \"\"\", '1.1.3': \"\"\" - Faster way to access ball prediction data in", "resulting in incorrect quickchat - dtracers - Added more built-in colors to the", "during dropshot mode. It was reporting 2 goals rather than the expected 140.", "a factor of 100. 
This means you can draw a lot of lines", "longer spawn duplicate processes. \"\"\", '1.1.2': \"\"\" Faster way to access ball prediction", "your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed a bug where auto-run", "0.0.30, ask for instructions on discord. \"\"\", '0.0.30': \"\"\" - New core dll", "in their file path by Zaptive - Subprocess agent for future Rust support", "110011 00110110 | /| | | ___ \\/ _ \\| __| 01101100 010010", "\"\"\" ______ _ ______ _ 10100 | ___ \\ | | ___ \\", "| |_/ / | | |_/ / ___ | |_ 110011 00110110 |", "\"beta\" a long time ago so this isn't as big a milestone as", "quickly. We will follow this up with a proper fix as soon as", "is patched - ccman32 and hallo-doei - Fixed bug resulting in incorrect quickchat", "nearest integer, so 0.3% boost will now appear as 1 instead of 0.", "this up with a proper fix as soon as possible. You may also", "now it will be \"don't draw it\". - Showing the rate that inputs", "1.49 and RLBot 0.0.30, ask for instructions on discord. \"\"\", '0.0.30': \"\"\" -", "\"\"\", '1.6.0':\"\"\" Add support for auto starting .NET executables. \"\"\", '1.5.1': \"\"\" Fixed", "draw a lot of lines at once without getting errors. - Boost amount", "will now round up to the nearest integer, so 0.3% boost will now", "then canceling - hallo-doei - Adding file checking before injection (Resolves #167) -", "in a row in the GUI will no longer spawn duplicate processes. \"\"\",", "thanks to hallo_doei! Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The", "preset loading to allow multiple agents to saved/loaded correctly if they have the", "loading certain RLBot config files with relative paths for agents. Fixed agent preset", "patch broke dodges for our bots; this update fixes it. \"\"\", '1.0.5': \"\"\"", "physics modeling. Take advantage of this to do next-level wall reads, catches, and", "updates at 120Hz and is not subject to interpolation. You can still make", "of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed a", "decreased again because many people experienced errors related to memory access. The limit", "Got rid of the libpng warning seen when using the GUI. - Giving", "\\___/ \\__| 01001 \"\"\" def get_current_release_notes(): if __version__ in release_notes: return release_notes[__version__] return", "streamline upgrade checks. - ima9rd \"\"\", '1.4.2': \"\"\" Adding support for auto-running java", "'1.3.0': \"\"\" Accurate ball prediction for Hoops and Dropshot modes! - Kipje13, Marvin,", "framework quits. This has been necessary recently to avoid buggy situations. - Shutting", "- Clicking on the \"Run\" button twice in a row in the GUI", "by hallo_doei, ccman32, and tarehart 2. Ball prediction. We now provide a list", "bot development, and you can also get creative with it. Visit the wiki", "to ccman32 and dtracers for delivering this fix quickly! \"\"\", '0.0.31': \"\"\" Rapid", "the GUI works better now. - Got rid of the libpng warning seen", "can draw a lot of lines at once without getting errors. 
- Boost", "| |\\ \\| |____| |_/ / (_) | |_ 010010 10010 \\_| \\_\\_____/\\____/", "hallo-doei - Adding file checking before injection (Resolves #167) - Redox - Fixed", "data is fixed - Boost pad data is fixed - Loadout configuration is", "ima9rd \"\"\", '1.5.0': \"\"\" Adding a have_internet helper function to help streamline upgrade", "a bug where auto-run executables would crash when trying to write to stderr.", "1.0 release! We actually left \"beta\" a long time ago so this isn't", "built-in colors to the python rendering manager - Eastvillage - Fix for items", "mode thanks to hallo_doei! Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: -", "is fixed Thanks to ccman32 and dtracers for delivering this fix quickly! \"\"\",", "for GUI crash when saving preset then canceling - hallo-doei - Adding file", "in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed a bug where", "and also restoring the ability to get game tick data during replays and", "in the GUI works better now. - Got rid of the libpng warning", "agents. Fixed agent preset loading to allow multiple agents to saved/loaded correctly if", "advantage of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed", "data during instant replays. \"\"\", '1.0.3': \"\"\" Time for the big 1.0 release!", "'1.4.2': \"\"\" Adding support for auto-running java bots during tournaments. To take advantage", "for a render message has been decreased again because many people experienced errors", "about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout config for orange team", "and tarehart Bonus: - You can now play on Salty Shores thanks to", "Thanks to ccman32 and dtracers for delivering this short-term fix quickly. We will", "boost will now appear as 1 instead of 0. - Fixed a crash", "breaks the config. \"\"\", '1.3.0': \"\"\" Accurate ball prediction for Hoops and Dropshot", "data which updates at 120Hz and is not subject to interpolation. You can", "influenced by real controller input. - Creating new presets in the GUI works", "2 goals rather than the expected 140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix", "error messages when cfg files are messed up. \"\"\", '1.2.2': \"\"\" - Rearranged", "- Java bots will now shut down when the python framework quits. This", "with it. Visit the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written", "helper function to help streamline upgrade checks. - ima9rd \"\"\", '1.4.2': \"\"\" Adding", "because many people experienced errors related to memory access. The limit is now", "status in Dropshot mode thanks to hallo_doei! Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot", "extracted properly during dropshot mode. It was reporting 2 goals rather than the", "colors to the python rendering manager - Eastvillage - Fix for items with", "get creative with it. Visit the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State", "___ \\ | | 00101 110011 | |_/ / | | |_/ /", "0.3% boost will now appear as 1 instead of 0. - Fixed a", "| | ___ \\/ _ \\| __| 01101100 010010 | |\\ \\| |____|", "controller input. - Creating new presets in the GUI works better now. -", "See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! 
- Faster way to access ball prediction data", "\"\"\", '0.0.30': \"\"\" - New core dll that is less likely to break", "wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei, ccman32, and", "is found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding a have_internet helper function to", "League patch 1.50 with the following known issues: - Dropshot tile data is", "for our bots; this update fixes it. \"\"\", '1.0.5': \"\"\" Maximum size for", "some game crashes, and also restoring the ability to get game tick data", "GUI no longer breaks the config. \"\"\", '1.3.0': \"\"\" Accurate ball prediction for", "- Redox - Fixed typo in rlbot.cfg - Redox - Fancy release notes", "the rate that inputs are received for each player index when you press", "__version__ = '1.6.1' release_notes = { '1.6.1': \"\"\" Fixed GUI crash when loading", "replays and the postgame. - tarehart - Fixed a bug where bots would", "# 3) we can import it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__", "- Loadout configuration is fixed Thanks to ccman32 and dtracers for delivering this", "at 120Hz and is not subject to interpolation. You can still make a", "______ _ ______ _ 10100 | ___ \\ | | ___ \\ |", "'1.0.4': \"\"\" - Maximum size for a render message has been increased by", "a milestone as the number implies, but we DO have two great new", "to another team in the GUI no longer breaks the config. \"\"\", '1.3.0':", "certain RLBot config files with relative paths for agents. Fixed agent preset loading", "found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding a have_internet helper function to help", "the following known issues: - Dropshot tile data is missing - Boost pad", "was found. Updated GUI to launch Rocket League when clicking run if no", "will no longer attempt to kill bots twice in a row. - Clicking", "dropshot mode. It was reporting 2 goals rather than the expected 140. \"\"\",", "to stderr. - Dragging bots to another team in the GUI no longer", "Avoiding and suppressing some game crashes, and also restoring the ability to get", "https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout config for orange team is now respected", "python. - Skyborg \"\"\", '1.1.3': \"\"\" - Faster way to access ball prediction", "and is not subject to interpolation. You can still make a great bot", "to get game tick data during replays and the postgame. - tarehart -", "Clicking on the \"Run\" button twice in a row in the GUI will", "accurate on the standard arena, not hoops or dropshot. Documentation and examples can", "run if no Rocket League process is found. - ima9rd \"\"\", '1.5.0': \"\"\"", "Skyborg - Java bots will now shut down when the python framework quits.", "the expected 140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix for dodge cancels /", "inputs are received for each player index when you press the [home] key.", "| ___ \\ | | ___ \\ | | 00101 110011 | |_/", "file path by Zaptive - Subprocess agent for future Rust support by whatisaphone", "and tarehart 2. Ball prediction. We now provide a list of future ball", "load and track appearance configs more effectively. - Fixed bug where RUN button", "\"\"\" Time for the big 1.0 release! 
We actually left \"beta\" a long", "This means you can draw a lot of lines at once without getting", "GUI crash when loading certain RLBot config files with relative paths for agents.", "1) we don't load dependencies by storing it in __init__.py # 2) we", "processes. \"\"\", '1.1.2': \"\"\" Faster way to access ball prediction data in python.", "next-level wall reads, catches, and dribbles! You can read about the math involved", "- Maximum size for a render message has been increased by a factor", "Maximum size for a render message has been decreased again because many people", "Code written by chip and tarehart Bonus: - You can now play on", "Fancy release notes - tarehart and Skyborg \"\"\" } release_banner = \"\"\" ______", "access ball prediction data in python. - Skyborg \"\"\", '1.1.3': \"\"\" - Faster", "fixes it. \"\"\", '1.0.5': \"\"\" Maximum size for a render message has been", "written by chip and tarehart Bonus: - You can now play on Salty", "great help during bot development, and you can also get creative with it.", "\"\"\", '1.5.0': \"\"\" Adding a have_internet helper function to help streamline upgrade checks.", "can also get creative with it. Visit the wiki for details and documentation", "draw it\". - Showing the rate that inputs are received for each player", "great new features! 1. Setting game state. You can manipulate the position, velocity,", "reason # 3) we can import it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package", "= \"\"\" ______ _ ______ _ 10100 | ___ \\ | | ___", "into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes = { '1.6.1':", "found. Updated GUI to launch Rocket League when clicking run if no Rocket", "two great new features! 1. Setting game state. You can manipulate the position,", "not saving correct path - hallo-doei - Fix for GUI crash when saving", "back off with the [end] key. - Fixed a bug where party_member_bot could", "proper fix as soon as possible. You may also choose to stay on", "Fixed bug resulting in incorrect quickchat - dtracers - Added more built-in colors", "League patch broke dodges for our bots; this update fixes it. \"\"\", '1.0.5':", "ima9rd \"\"\", '1.6.0':\"\"\" Add support for auto starting .NET executables. \"\"\", '1.5.1': \"\"\"", "- tarehart - Fixed a bug where bots would dodge when they intended", "return \"Trouble? Ask on Discord at https://discord.gg/5cNbXgG \" \\ \"or report an issue", "for GUI not saving correct path - hallo-doei - Fix for GUI crash", "down the python framework will no longer attempt to kill bots twice in", "\\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\" def get_current_release_notes(): if __version__ in release_notes: return release_notes[__version__]", "velocity, etc of the ball and the cars! This can be a great", "ball's status in Dropshot mode thanks to hallo_doei! Read all about it at", "Dropshot tile data is fixed - Boost pad data is fixed - Loadout", "key. - Fixed a bug where party_member_bot could get influenced by real controller", "among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! - Faster way to access ball", "state. You can manipulate the position, velocity, etc of the ball and the", "rlbot.cfg - Redox - Fancy release notes - tarehart and Skyborg \"\"\" }", "the ability to get game tick data during replays and the postgame. 
-", "rid of the libpng warning seen when using the GUI. - Giving specific", "flips! - ccman32 * *************************************************** Plus: - Changing the rendering strategy for 3D", "access. The limit is now only double the original. \"\"\", '1.0.4': \"\"\" -", "the wall bounces are only accurate on the standard arena, not hoops or", "\"\"\", '1.3.0': \"\"\" Accurate ball prediction for Hoops and Dropshot modes! - Kipje13,", "up to the nearest integer, so 0.3% boost will now appear as 1", "{ '1.6.1': \"\"\" Fixed GUI crash when loading certain RLBot config files with", "'1.6.0':\"\"\" Add support for auto starting .NET executables. \"\"\", '1.5.1': \"\"\" Fixed crash", "Marvin, NeverCast, et. al. \"\"\", '1.2.6': \"\"\" Fixed a bug where field info", "crash that would commonly happen after a match ends. As a side effect,", "- Skyborg \"\"\", '1.1.1': \"\"\" You can now get information about the ball's", "presets in the GUI works better now. - Got rid of the libpng", "Faster way to access ball prediction data in python. - Skyborg \"\"\", '1.1.1':", "The limit is now only double the original. \"\"\", '1.0.4': \"\"\" - Maximum", "Rocket League patch broke dodges for our bots; this update fixes it. \"\"\",", "don't load dependencies by storing it in __init__.py # 2) we can import", "message has been decreased again because many people experienced errors related to memory", "and dribbles! You can read about the math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently", "Eastvillage - Fix for items with a ':' not showing up in the", "Dropshot mode thanks to hallo_doei! Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes:", "import it in setup.py for the same reason # 3) we can import", "press the [home] key. Toggle back off with the [end] key. - Fixed", "by storing it in __init__.py # 2) we can import it in setup.py", "https://discord.gg/5cNbXgG \" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes(): print(release_banner) print(\"Version", "based on chip's excellent physics modeling. Take advantage of this to do next-level", "is fixed - Loadout configuration is fixed Thanks to ccman32 and dtracers for", "bit, and made it load and track appearance configs more effectively. - Fixed", "the ball's status in Dropshot mode thanks to hallo_doei! Read all about it", "ima9rd \"\"\", '1.4.2': \"\"\" Adding support for auto-running java bots during tournaments. To", "happen after a match ends. As a side effect, you can no longer", "load dependencies by storing it in __init__.py # 2) we can import it", "before injection (Resolves #167) - Redox - Fixed typo in rlbot.cfg - Redox", "lines at once without getting errors. - Boost amount for cars will now", "was reporting 2 goals rather than the expected 140. \"\"\", '1.2.5': \"\"\" ***************************************************", "to stay on Rocket League 1.49 and RLBot 0.0.30, ask for instructions on", "is quite nice for the scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details!", "at once without getting errors. - Boost amount for cars will now round", "when they intended to double jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket", "Adding file checking before injection (Resolves #167) - Redox - Fixed typo in", "input. - Creating new presets in the GUI works better now. 
- Got", "Salty Shores thanks to hallo_doei - Bug fix for people with spaces in", "the scientists among us. See https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! - Faster way to", "We will follow this up with a proper fix as soon as possible.", "work after killing bots. \"\"\", '1.2.0': \"\"\" - We now offer a 'RigidBodyTick'", "file checking before injection (Resolves #167) - Redox - Fixed typo in rlbot.cfg", "fix as soon as possible. You may also choose to stay on Rocket", "- hallo-doei - Fix for GUI crash when saving preset then canceling -", "ball prediction for Hoops and Dropshot modes! - Kipje13, Marvin, NeverCast, et. al.", "______ _ 10100 | ___ \\ | | ___ \\ | | 00101", "write to stderr. - Dragging bots to another team in the GUI no", "Fixed a crash that would commonly happen after a match ends. As a", "known issues: - Dropshot tile data is missing - Boost pad data is", "dtracers - Added more built-in colors to the python rendering manager - Eastvillage", "ccman32 and dtracers for delivering this short-term fix quickly. We will follow this", "fix quickly! \"\"\", '0.0.31': \"\"\" Rapid response to Rocket League patch 1.50 with", "implies, but we DO have two great new features! 1. Setting game state.", "prediction data in python. - Skyborg \"\"\", '1.1.3': \"\"\" - Faster way to", "more built-in colors to the python rendering manager - Eastvillage - Fix for", "\"\"\" - Rearranged the GUI a bit, and made it load and track", "could get influenced by real controller input. - Creating new presets in the", "- hallo_doei - Avoiding and suppressing some game crashes, and also restoring the", "see up-to-date player data during instant replays. \"\"\", '1.0.3': \"\"\" Time for the", "delivering this short-term fix quickly. We will follow this up with a proper", "where bots would dodge when they intended to double jump. -tarehart \"\"\", '1.0.6':", "Redox - Fixed typo in rlbot.cfg - Redox - Fancy release notes -", "short-term fix quickly. We will follow this up with a proper fix as", "# Store the version here so: # 1) we don't load dependencies by", "modeling. Take advantage of this to do next-level wall reads, catches, and dribbles!", "without getting errors. - Boost amount for cars will now round up to", "way to access ball prediction data in python. - Skyborg \"\"\", '1.1.1': \"\"\"", "ago so this isn't as big a milestone as the number implies, but", "/ (_) | |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\" def", "we DO have two great new features! 1. Setting game state. You can", "jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket League patch broke dodges for", "mode. It was reporting 2 goals rather than the expected 140. \"\"\", '1.2.5':", "for cars will now round up to the nearest integer, so 0.3% boost", "messed up. \"\"\", '1.2.2': \"\"\" - Rearranged the GUI a bit, and made", "on Discord at https://discord.gg/5cNbXgG \" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\" def", "see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: - Fixed a bug where auto-run executables would", "Fixed a bug where field info was not extracted properly during dropshot mode.", "future ball positions based on chip's excellent physics modeling. Take advantage of this", "documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei, ccman32, and tarehart 2. 
Ball prediction.", "- Boost pad data is missing - Loadout configuration is broken Thanks to", "config files with relative paths for agents. Fixed agent preset loading to allow", "\"\"\", '1.2.0': \"\"\" - We now offer a 'RigidBodyTick' thanks to whatisaphone! It's", "GUI. - Giving specific error messages when cfg files are messed up. \"\"\",", "Fixed a bug where auto-run executables would crash when trying to write to", "1.50 with the following known issues: - Dropshot tile data is missing -", "League is patched - ccman32 and hallo-doei - Fixed bug resulting in incorrect", "for orange team is now respected again. - ccman32 - Fixed a bug", "works better now. - Got rid of the libpng warning seen when using", "and RLBot 0.0.30, ask for instructions on discord. \"\"\", '0.0.30': \"\"\" - New", "010010 | |\\ \\| |____| |_/ / (_) | |_ 010010 10010 \\_|", "limit is now only double the original. \"\"\", '1.0.4': \"\"\" - Maximum size", "fix for people with spaces in their file path by Zaptive - Subprocess", "'1.2.2': \"\"\" - Rearranged the GUI a bit, and made it load and", "will be \"don't draw it\". - Showing the rate that inputs are received", "Fixed typo in rlbot.cfg - Redox - Fancy release notes - tarehart and", "agent preset loading to allow multiple agents to saved/loaded correctly if they have", "starting .NET executables. \"\"\", '1.5.1': \"\"\" Fixed crash with GUI when no default", "index when you press the [home] key. Toggle back off with the [end]", "- ima9rd \"\"\", '1.4.2': \"\"\" Adding support for auto-running java bots during tournaments.", "Formerly it was \"draw it, even though it's crazy sometimes\", now it will", "again because many people experienced errors related to memory access. The limit is", "Loadout configuration is broken Thanks to ccman32 and dtracers for delivering this short-term", "data in python. - Skyborg \"\"\", '1.1.1': \"\"\" You can now get information", "math involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces are only accurate on", "- dtracers - Added more built-in colors to the python rendering manager -", "is missing - Boost pad data is missing - Loadout configuration is broken", "also restoring the ability to get game tick data during replays and the", "Dropshot tile data is missing - Boost pad data is missing - Loadout", "*************************************************** Plus: - Changing the rendering strategy for 3D lines that go past", "- We now offer a 'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation", "| /| | | ___ \\/ _ \\| __| 01101100 010010 | |\\", "- Fixed a bug where party_member_bot could get influenced by real controller input.", "'1.1.1': \"\"\" You can now get information about the ball's status in Dropshot", "python framework quits. This has been necessary recently to avoid buggy situations. -", "We actually left \"beta\" a long time ago so this isn't as big", "development, and you can also get creative with it. Visit the wiki for", "be \"don't draw it\". - Showing the rate that inputs are received for", "- Fixed bug resulting in incorrect quickchat - dtracers - Added more built-in", "files are messed up. \"\"\", '1.2.2': \"\"\" - Rearranged the GUI a bit,", "was \"draw it, even though it's crazy sometimes\", now it will be \"don't", "it, but this feature is quite nice for the scientists among us. See", "for auto-running java bots during tournaments. To take advantage of this in your", "dtracers for delivering this fix quickly! 
\"\"\", '0.0.31': \"\"\" Rapid response to Rocket", "you can no longer see up-to-date player data during instant replays. \"\"\", '1.0.3':", "recently to avoid buggy situations. - Shutting down the python framework will no", "120Hz and is not subject to interpolation. You can still make a great", "this fix quickly! \"\"\", '0.0.31': \"\"\" Rapid response to Rocket League patch 1.50", "specific error messages when cfg files are messed up. \"\"\", '1.2.2': \"\"\" -", "dll that is less likely to break when Rocket League is patched -", "when Rocket League is patched - ccman32 and hallo-doei - Fixed bug resulting", "fixed - Loadout configuration is fixed Thanks to ccman32 and dtracers for delivering", "situations. - Shutting down the python framework will no longer attempt to kill", "during tournaments. To take advantage of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus", "tarehart and Skyborg \"\"\" } release_banner = \"\"\" ______ _ ______ _ 10100", "\"\"\" Accurate ball prediction for Hoops and Dropshot modes! - Kipje13, Marvin, NeverCast,", "| | |_/ / ___ | |_ 110011 00110110 | /| | |", "Adding support for auto-running java bots during tournaments. To take advantage of this", "milestone as the number implies, but we DO have two great new features!", "framework will no longer attempt to kill bots twice in a row. -", "setup.py for the same reason # 3) we can import it into your", "bots will now shut down when the python framework quits. This has been", "Maximum size for a render message has been increased by a factor of", "the python framework quits. This has been necessary recently to avoid buggy situations.", "fix for Rocket League patch 1.50. Compared to previous version: - Dropshot tile", "Fixed GUI crash when loading certain RLBot config files with relative paths for", "been necessary recently to avoid buggy situations. - Shutting down the python framework", "hoops or dropshot. Documentation and examples can be found here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written", "and the cars! This can be a great help during bot development, and", "a 'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation of physics data which", "big a milestone as the number implies, but we DO have two great", "with spaces in their file path by Zaptive - Subprocess agent for future", "rendering manager - Eastvillage - Fix for items with a ':' not showing", "for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei, ccman32, and tarehart", "goals rather than the expected 140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix for", "to avoid buggy situations. - Shutting down the python framework will no longer", "to memory access. The limit is now only double the original. \"\"\", '1.0.4':", "the cars! This can be a great help during bot development, and you", "access ball prediction data in python. - Skyborg - Java bots will now", "\"\"\", '1.0.5': \"\"\" Maximum size for a render message has been decreased again", "\"\"\" Adding a have_internet helper function to help streamline upgrade checks. - ima9rd", "instead of 0. - Fixed a crash that would commonly happen after a", "release_notes = { '1.6.1': \"\"\" Fixed GUI crash when loading certain RLBot config", "ccman32 and dtracers for delivering this fix quickly! \"\"\", '0.0.31': \"\"\" Rapid response", "each player index when you press the [home] key. 
Toggle back off with", "-tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket League patch broke dodges for our", "https://github.com/RLBot/RLBotPythonExample/wiki/Rigid-Body-Tick for more details! - Faster way to access ball prediction data in", "will no longer spawn duplicate processes. \"\"\", '1.1.2': \"\"\" Faster way to access", "You can now play on Salty Shores thanks to hallo_doei - Bug fix", "New core dll that is less likely to break when Rocket League is", "\" \\ \"or report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes(): print(release_banner) print(\"Version {}\".format(__version__))", "without it, but this feature is quite nice for the scientists among us.", "/ ___ | |_ 110011 00110110 | /| | | ___ \\/ _", "GUI not saving correct path - hallo-doei - Fix for GUI crash when", "as 1 instead of 0. - Fixed a crash that would commonly happen", "'1.1.2': \"\"\" Faster way to access ball prediction data in python. - Skyborg", "will now appear as 1 instead of 0. - Fixed a crash that", "This can be a great help during bot development, and you can also", "\"\"\" Fixed crash with GUI when no default RLBot.cfg file was found. Updated", "To take advantage of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes:", "here so: # 1) we don't load dependencies by storing it in __init__.py", "people with spaces in their file path by Zaptive - Subprocess agent for", "auto-running java bots during tournaments. To take advantage of this in your bot,", "for people with spaces in their file path by Zaptive - Subprocess agent", "quickly! \"\"\", '0.0.31': \"\"\" Rapid response to Rocket League patch 1.50 with the", "3) we can import it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ =", "'' def get_help_text(): return \"Trouble? Ask on Discord at https://discord.gg/5cNbXgG \" \\ \"or", "of lines at once without getting errors. - Boost amount for cars will", "involved here: https://samuelpmish.github.io/notes/RocketLeague/ball_bouncing/ Note: currently the wall bounces are only accurate on the", "round up to the nearest integer, so 0.3% boost will now appear as", "thanks to whatisaphone! It's a lower-level representation of physics data which updates at", "as possible. You may also choose to stay on Rocket League 1.49 and", "likely to break when Rocket League is patched - ccman32 and hallo-doei -", "- Giving specific error messages when cfg files are messed up. \"\"\", '1.2.2':", "report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes(): print(release_banner) print(\"Version {}\".format(__version__)) print(get_current_release_notes()) print(get_help_text()) print(\"\")", "Store the version here so: # 1) we don't load dependencies by storing", "in python. - Skyborg \"\"\", '1.1.3': \"\"\" - Faster way to access ball", "will follow this up with a proper fix as soon as possible. You", "when you press the [home] key. Toggle back off with the [end] key.", "ccman32, and tarehart 2. Ball prediction. We now provide a list of future", "Rearranged the GUI a bit, and made it load and track appearance configs", "- The loadout config for orange team is now respected again. - ccman32", "now. - Got rid of the libpng warning seen when using the GUI.", "on Rocket League 1.49 and RLBot 0.0.30, ask for instructions on discord. \"\"\",", "but we DO have two great new features! 1. Setting game state. 
You", "storing it in __init__.py # 2) we can import it in setup.py for", "'1.2.6': \"\"\" Fixed a bug where field info was not extracted properly during", "respected again. - ccman32 - Fixed a bug where the GUI would crash", "Visit the wiki for details and documentation - https://github.com/RLBot/RLBot/wiki/Manipulating-Game-State Code written by hallo_doei,", "chip's excellent physics modeling. Take advantage of this to do next-level wall reads,", "executables. \"\"\", '1.5.1': \"\"\" Fixed crash with GUI when no default RLBot.cfg file", "stay on Rocket League 1.49 and RLBot 0.0.30, ask for instructions on discord.", "Toggle back off with the [end] key. - Fixed a bug where party_member_bot", "no longer attempt to kill bots twice in a row. - Clicking on", "items with a ':' not showing up in the GUI - hallo-doei -", "'1.2.5': \"\"\" *************************************************** * Fix for dodge cancels / half flips! - ccman32", "GUI crash when saving preset then canceling - hallo-doei - Adding file checking", "of physics data which updates at 120Hz and is not subject to interpolation.", "a bit, and made it load and track appearance configs more effectively. -", "prediction. We now provide a list of future ball positions based on chip's", "\"\"\" *************************************************** * Fix for dodge cancels / half flips! - ccman32 *", "checking before injection (Resolves #167) - Redox - Fixed typo in rlbot.cfg -", "support for auto-running java bots during tournaments. To take advantage of this in", "that would commonly happen after a match ends. As a side effect, you", "when loading certain RLBot config files with relative paths for agents. Fixed agent", "and suppressing some game crashes, and also restoring the ability to get game", "bug where bots would dodge when they intended to double jump. -tarehart \"\"\",", "go past the camera. Formerly it was \"draw it, even though it's crazy", "to allow multiple agents to saved/loaded correctly if they have the same name.", "will now shut down when the python framework quits. This has been necessary", "etc of the ball and the cars! This can be a great help", "- Fix for GUI not saving correct path - hallo-doei - Fix for", "properly during dropshot mode. It was reporting 2 goals rather than the expected", "Kipje13, Marvin, NeverCast, et. al. \"\"\", '1.2.6': \"\"\" Fixed a bug where field", "hallo-doei - Fix for GUI not saving correct path - hallo-doei - Fix", "can now play on Salty Shores thanks to hallo_doei - Bug fix for", "and dtracers for delivering this fix quickly! \"\"\", '0.0.31': \"\"\" Rapid response to", "the original. \"\"\", '1.0.4': \"\"\" - Maximum size for a render message has", "list of future ball positions based on chip's excellent physics modeling. Take advantage", "lot of lines at once without getting errors. - Boost amount for cars", "a bug where party_member_bot could get influenced by real controller input. - Creating", "'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation of physics data which updates", "spawn duplicate processes. \"\"\", '1.1.2': \"\"\" Faster way to access ball prediction data", "to break when Rocket League is patched - ccman32 and hallo-doei - Fixed", "is less likely to break when Rocket League is patched - ccman32 and", "Rapid response to Rocket League patch 1.50 with the following known issues: -", "Add support for auto starting .NET executables. \"\"\", '1.5.1': \"\"\" Fixed crash with", "We now offer a 'RigidBodyTick' thanks to whatisaphone! 
It's a lower-level representation of", "for delivering this fix quickly! \"\"\", '0.0.31': \"\"\" Rapid response to Rocket League", "which updates at 120Hz and is not subject to interpolation. You can still", "possible. You may also choose to stay on Rocket League 1.49 and RLBot", "| ___ \\/ _ \\| __| 01101100 010010 | |\\ \\| |____| |_/", "big 1.0 release! We actually left \"beta\" a long time ago so this", "a bug where the GUI would crash with a \"KeyError\". - hallo_doei -", "way to access ball prediction data in python. - Skyborg - Java bots", "DO have two great new features! 1. Setting game state. You can manipulate", "*************************************************** * Fix for dodge cancels / half flips! - ccman32 * ***************************************************", "# 1) we don't load dependencies by storing it in __init__.py # 2)", "return release_notes[__version__] return '' def get_help_text(): return \"Trouble? Ask on Discord at https://discord.gg/5cNbXgG", "you can also get creative with it. Visit the wiki for details and", "the GUI. - Giving specific error messages when cfg files are messed up.", "\"or report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes(): print(release_banner) print(\"Version {}\".format(__version__)) print(get_current_release_notes()) print(get_help_text())", "ccman32 - Fixed a bug where the GUI would crash with a \"KeyError\".", "of 100. This means you can draw a lot of lines at once", "render message has been increased by a factor of 100. This means you", "if no Rocket League process is found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding", "release_notes[__version__] return '' def get_help_text(): return \"Trouble? Ask on Discord at https://discord.gg/5cNbXgG \"", "tick data during replays and the postgame. - tarehart - Fixed a bug", "Read all about it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout config for", "injection (Resolves #167) - Redox - Fixed typo in rlbot.cfg - Redox -", "on chip's excellent physics modeling. Take advantage of this to do next-level wall", "are messed up. \"\"\", '1.2.2': \"\"\" - Rearranged the GUI a bit, and", "during instant replays. \"\"\", '1.0.3': \"\"\" Time for the big 1.0 release! We", "with the following known issues: - Dropshot tile data is missing - Boost", "now respected again. - ccman32 - Fixed a bug where the GUI would", "choose to stay on Rocket League 1.49 and RLBot 0.0.30, ask for instructions", "lower-level representation of physics data which updates at 120Hz and is not subject", "- Dropshot tile data is fixed - Boost pad data is fixed -", "has been decreased again because many people experienced errors related to memory access.", "with a proper fix as soon as possible. You may also choose to", "up-to-date player data during instant replays. \"\"\", '1.0.3': \"\"\" Time for the big", "\\| |____| |_/ / (_) | |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__|", "ccman32 and hallo-doei - Fixed bug resulting in incorrect quickchat - dtracers -", "a great help during bot development, and you can also get creative with", "bots during tournaments. To take advantage of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java", "their file path by Zaptive - Subprocess agent for future Rust support by", "broke dodges for our bots; this update fixes it. \"\"\", '1.0.5': \"\"\" Maximum", "League process is found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding a have_internet helper", "default RLBot.cfg file was found. 
Updated GUI to launch Rocket League when clicking", "bug where field info was not extracted properly during dropshot mode. It was", "play on Salty Shores thanks to hallo_doei - Bug fix for people with", "to kill bots twice in a row. - Clicking on the \"Run\" button", "double jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest Rocket League patch broke dodges", "team is now respected again. - ccman32 - Fixed a bug where the", "Rocket League process is found. - ima9rd \"\"\", '1.5.0': \"\"\" Adding a have_internet", "get influenced by real controller input. - Creating new presets in the GUI", "postgame. - tarehart - Fixed a bug where bots would dodge when they", "as soon as possible. You may also choose to stay on Rocket League", "you can draw a lot of lines at once without getting errors. -", "bots would dodge when they intended to double jump. -tarehart \"\"\", '1.0.6': \"\"\"", "NeverCast, et. al. \"\"\", '1.2.6': \"\"\" Fixed a bug where field info was", "libpng warning seen when using the GUI. - Giving specific error messages when", "manager - Eastvillage - Fix for items with a ':' not showing up", "render message has been decreased again because many people experienced errors related to", "rate that inputs are received for each player index when you press the", "when saving preset then canceling - hallo-doei - Adding file checking before injection", "ball prediction data in python. - Skyborg \"\"\", '1.1.1': \"\"\" You can now", "it into your module module # https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package __version__ = '1.6.1' release_notes = {", "twice in a row in the GUI will no longer spawn duplicate processes.", "would not work after killing bots. \"\"\", '1.2.0': \"\"\" - We now offer", "only double the original. \"\"\", '1.0.4': \"\"\" - Maximum size for a render", "here: https://github.com/RLBot/RLBot/wiki/Ball-Path-Prediction Code written by chip and tarehart Bonus: - You can now", "the position, velocity, etc of the ball and the cars! This can be", "the ball and the cars! This can be a great help during bot", "may also choose to stay on Rocket League 1.49 and RLBot 0.0.30, ask", "executables would crash when trying to write to stderr. - Dragging bots to", "tarehart 2. Ball prediction. We now provide a list of future ball positions", "cars will now round up to the nearest integer, so 0.3% boost will", "expected 140. \"\"\", '1.2.5': \"\"\" *************************************************** * Fix for dodge cancels / half", "previous version: - Dropshot tile data is fixed - Boost pad data is", "ability to get game tick data during replays and the postgame. - tarehart", "necessary recently to avoid buggy situations. - Shutting down the python framework will", "allow multiple agents to saved/loaded correctly if they have the same name. -", "in the GUI will no longer spawn duplicate processes. \"\"\", '1.1.2': \"\"\" Faster", "now offer a 'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation of physics", "Bonus: - You can now play on Salty Shores thanks to hallo_doei -", "integer, so 0.3% boost will now appear as 1 instead of 0. -", "would crash with a \"KeyError\". - hallo_doei - Avoiding and suppressing some game", "support by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive fix for Rocket League patch", "- Eastvillage - Fix for items with a ':' not showing up in", "match ends. As a side effect, you can no longer see up-to-date player", "paths for agents. 
Fixed agent preset loading to allow multiple agents to saved/loaded", "tournaments. To take advantage of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug", "GUI will no longer spawn duplicate processes. \"\"\", '1.1.2': \"\"\" Faster way to", "It's a lower-level representation of physics data which updates at 120Hz and is", "ball and the cars! This can be a great help during bot development,", "10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\" def get_current_release_notes(): if __version__ in release_notes:", "this update fixes it. \"\"\", '1.0.5': \"\"\" Maximum size for a render message", "half flips! - ccman32 * *************************************************** Plus: - Changing the rendering strategy for", "the python rendering manager - Eastvillage - Fix for items with a ':'", "(Resolves #167) - Redox - Fixed typo in rlbot.cfg - Redox - Fancy", "GUI would not work after killing bots. \"\"\", '1.2.0': \"\"\" - We now", "whatisaphone! It's a lower-level representation of physics data which updates at 120Hz and", "a \"KeyError\". - hallo_doei - Avoiding and suppressing some game crashes, and also", "of future ball positions based on chip's excellent physics modeling. Take advantage of", "it at https://github.com/RLBot/RLBot/wiki/Dropshot Other changes: - The loadout config for orange team is", "quits. This has been necessary recently to avoid buggy situations. - Shutting down", "hallo_doei, ccman32, and tarehart 2. Ball prediction. We now provide a list of", "prediction data in python. - Skyborg - Java bots will now shut down", "past the camera. Formerly it was \"draw it, even though it's crazy sometimes\",", "we don't load dependencies by storing it in __init__.py # 2) we can", "discord. \"\"\", '0.0.30': \"\"\" - New core dll that is less likely to", "kill bots twice in a row. - Clicking on the \"Run\" button twice", "files with relative paths for agents. Fixed agent preset loading to allow multiple", "effect, you can no longer see up-to-date player data during instant replays. \"\"\",", "get_help_text(): return \"Trouble? Ask on Discord at https://discord.gg/5cNbXgG \" \\ \"or report an", "take advantage of this in your bot, see https://github.com/RLBot/RLBotJavaExample/wiki/Auto-Launching-Java Plus bug fixes: -", "up with a proper fix as soon as possible. You may also choose", "to write to stderr. - Dragging bots to another team in the GUI", "not subject to interpolation. You can still make a great bot without it,", "seen when using the GUI. - Giving specific error messages when cfg files", "|_/ / | | |_/ / ___ | |_ 110011 00110110 | /|", "chip and tarehart Bonus: - You can now play on Salty Shores thanks", "to hallo_doei - Bug fix for people with spaces in their file path", "\\ | | 00101 110011 | |_/ / | | |_/ / ___", "dodge when they intended to double jump. -tarehart \"\"\", '1.0.6': \"\"\" The latest", "wall bounces are only accurate on the standard arena, not hoops or dropshot.", "\"\"\", '1.4.2': \"\"\" Adding support for auto-running java bots during tournaments. To take", "representation of physics data which updates at 120Hz and is not subject to", "bots; this update fixes it. \"\"\", '1.0.5': \"\"\" Maximum size for a render", "'1.6.1': \"\"\" Fixed GUI crash when loading certain RLBot config files with relative", "excellent physics modeling. 
Take advantage of this to do next-level wall reads, catches,", "typo in rlbot.cfg - Redox - Fancy release notes - tarehart and Skyborg", "Subprocess agent for future Rust support by whatisaphone \"\"\", '0.0.32': \"\"\" More comprehensive", "\\ \"or report an issue at https://github.com/RLBot/RLBot/issues\" def print_current_release_notes(): print(release_banner) print(\"Version {}\".format(__version__)) print(get_current_release_notes())", "\"\"\", '1.1.1': \"\"\" You can now get information about the ball's status in", "offer a 'RigidBodyTick' thanks to whatisaphone! It's a lower-level representation of physics data", "Changing the rendering strategy for 3D lines that go past the camera. Formerly", "on the standard arena, not hoops or dropshot. Documentation and examples can be", "| |_ 010010 10010 \\_| \\_\\_____/\\____/ \\___/ \\__| 01001 \"\"\" def get_current_release_notes(): if", "for agents. Fixed agent preset loading to allow multiple agents to saved/loaded correctly", "when cfg files are messed up. \"\"\", '1.2.2': \"\"\" - Rearranged the GUI", "for instructions on discord. \"\"\", '0.0.30': \"\"\" - New core dll that is", "bug where the GUI would crash with a \"KeyError\". - hallo_doei - Avoiding", "configuration is broken Thanks to ccman32 and dtracers for delivering this short-term fix", "the GUI a bit, and made it load and track appearance configs more", "when clicking run if no Rocket League process is found. - ima9rd \"\"\",", "party_member_bot could get influenced by real controller input. - Creating new presets in" ]
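A quick sanity-check sketch (not part of the original file): it only calls the functions defined above, and the __main__ guard is an illustrative addition.

if __name__ == '__main__':
    # Prints the ASCII banner, the running version, its release notes, and the help text.
    print_current_release_notes()

    # Notes for an older version can also be looked up directly from the dict.
    print(release_notes.get('1.2.0', 'No notes recorded for this version.'))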
[ "nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes is None else nn.Linear(in_plane, num_classes) if", "the root directory of this source tree. from typing import Any, Dict import", "Inc. and its affiliates. # # This source code is licensed under the", "ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head defines a 2d average pooling layer", "applied. in_plane: Input size for the fully connected layer. \"\"\" super().__init__(unique_id, num_classes) assert", "(c) Facebook, Inc. and its affiliates. # # This source code is licensed", "\"\"\"Instantiates a FullyConnectedHead from a configuration. Args: config: A configuration for a FullyConnectedHead.", "return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x): # perform", "this source tree. from typing import Any, Dict import torch.nn as nn from", "FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"],", "\"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration. Args: config: A configuration for a", "connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id: str, num_classes: int, in_plane: int,", "If None, then the fully connected layer is not applied. in_plane: Input size", "and unique_id is used to refer to them. num_classes: Number of classes for", "in the root directory of this source tree. from typing import Any, Dict", "instances of the same head might be attached to a model, and unique_id", "identifier for the head. Multiple instances of the same head might be attached", "Number of classes for the head. If None, then the fully connected layer", "average pooling: out = self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1) if", "unique identifier for the head. Multiple instances of the same head might be", "its affiliates. # # This source code is licensed under the MIT license", "and its affiliates. # # This source code is licensed under the MIT", "might be attached to a model, and unique_id is used to refer to", "to a model, and unique_id is used to refer to them. num_classes: Number", "a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id: str, num_classes: int,", "directory of this source tree. from typing import Any, Dict import torch.nn as", "refer to them. num_classes: Number of classes for the head. If None, then", "FullyConnectedHead from a configuration. Args: config: A configuration for a FullyConnectedHead. See :func:`__init__`", "num_classes = config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\",", "This source code is licensed under the MIT license found in the #", "of the same head might be attached to a model, and unique_id is", "# # This source code is licensed under the MIT license found in", "by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id: str, num_classes:", "license found in the # LICENSE file in the root directory of this", "connected layer. 
\"\"\" super().__init__(unique_id, num_classes) assert num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane)", "= None if num_classes is None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod", "= out.reshape(out.size(0), -1) if self.fc is not None: out = self.fc(out) return out", "import is_pos_int from classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head defines", "classifier: out = out.reshape(out.size(0), -1) if self.fc is not None: out = self.fc(out)", "layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id: str, num_classes: int, in_plane: int, zero_init_bias:", "nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str, Any]) -> \"FullyConnectedHead\":", "forward(self, x): # perform average pooling: out = self.avgpool(x) # final classifier: out", "Any, Dict import torch.nn as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import", "them. num_classes: Number of classes for the head. If None, then the fully", "for FullyConnectedHead Args: unique_id: A unique identifier for the head. Multiple instances of", "the # LICENSE file in the root directory of this source tree. from", "from classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head defines a 2d", "is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None", "source tree. from typing import Any, Dict import torch.nn as nn from classy_vision.generic.util", "found in the # LICENSE file in the root directory of this source", "False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique identifier for the head.", "the same head might be attached to a model, and unique_id is used", "Copyright (c) Facebook, Inc. and its affiliates. # # This source code is", "model, and unique_id is used to refer to them. num_classes: Number of classes", "LICENSE file in the root directory of this source tree. from typing import", "attached to a model, and unique_id is used to refer to them. num_classes:", "classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head", "final classifier: out = out.reshape(out.size(0), -1) if self.fc is not None: out =", "fully connected layer is not applied. in_plane: Input size for the fully connected", "<gh_stars>1-10 #!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # #", "Input size for the fully connected layer. \"\"\" super().__init__(unique_id, num_classes) assert num_classes is", "out = out.reshape(out.size(0), -1) if self.fc is not None: out = self.fc(out) return", "licensed under the MIT license found in the # LICENSE file in the", "in_plane: Input size for the fully connected layer. 
\"\"\" super().__init__(unique_id, num_classes) assert num_classes", "Dict import torch.nn as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead,", "= nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes is None else nn.Linear(in_plane, num_classes)", "None if num_classes is None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def", "perform average pooling: out = self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1)", "self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from", "unique_id: A unique identifier for the head. Multiple instances of the same head", "class FullyConnectedHead(ClassyHead): \"\"\"This head defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by", "defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer", "in_plane: int, zero_init_bias: bool = False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A", "= False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique identifier for the", "in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x): # perform average pooling: out =", "connected layer is not applied. in_plane: Input size for the fully connected layer.", "register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`)", "be attached to a model, and unique_id is used to refer to them.", "# Copyright (c) Facebook, Inc. and its affiliates. # # This source code", "torch.nn as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\")", "config: A configuration for a FullyConnectedHead. See :func:`__init__` for parameters expected in the", "# perform average pooling: out = self.avgpool(x) # final classifier: out = out.reshape(out.size(0),", "the fully connected layer. \"\"\" super().__init__(unique_id, num_classes) assert num_classes is None or is_pos_int(num_classes)", "MIT license found in the # LICENSE file in the root directory of", "layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self,", "classes for the head. If None, then the fully connected layer is not", "is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes is", "or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes", "None, then the fully connected layer is not applied. in_plane: Input size for", "is None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str,", "a FullyConnectedHead. See :func:`__init__` for parameters expected in the config. Returns: A FullyConnectedHead", "python3 # Copyright (c) Facebook, Inc. and its affiliates. 
# # This source", "classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head defines a 2d average", "self, unique_id: str, num_classes: int, in_plane: int, zero_init_bias: bool = False, ): \"\"\"Constructor", "assert num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc", "cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x): # perform average", "num_classes: Number of classes for the head. If None, then the fully connected", "the config. Returns: A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\", None) in_plane =", "assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes is None", "Returns: A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return", "import Any, Dict import torch.nn as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads", "str, num_classes: int, in_plane: int, zero_init_bias: bool = False, ): \"\"\"Constructor for FullyConnectedHead", "None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if", "configuration for a FullyConnectedHead. See :func:`__init__` for parameters expected in the config. Returns:", "See :func:`__init__` for parameters expected in the config. Returns: A FullyConnectedHead instance. \"\"\"", "2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\"", "\"\"\" def __init__( self, unique_id: str, num_classes: int, in_plane: int, zero_init_bias: bool =", "def from_config(cls, config: Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration.", "False), ) def forward(self, x): # perform average pooling: out = self.avgpool(x) #", "root directory of this source tree. from typing import Any, Dict import torch.nn", ") def forward(self, x): # perform average pooling: out = self.avgpool(x) # final", "self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1) if self.fc is not None:", "int, in_plane: int, zero_init_bias: bool = False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id:", "in the # LICENSE file in the root directory of this source tree.", "FullyConnectedHead. See :func:`__init__` for parameters expected in the config. Returns: A FullyConnectedHead instance.", "for the fully connected layer. \"\"\" super().__init__(unique_id, num_classes) assert num_classes is None or", "zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead", "in the config. Returns: A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\", None) in_plane", "from_config(cls, config: Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration. 
Args:", "super().__init__(unique_id, num_classes) assert num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1,", "= config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x):", "a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`).", "layer. \"\"\" super().__init__(unique_id, num_classes) assert num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool", "num_classes is None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config:", "the fully connected layer is not applied. in_plane: Input size for the fully", ":func:`__init__` for parameters expected in the config. Returns: A FullyConnectedHead instance. \"\"\" num_classes", "num_classes: int, in_plane: int, zero_init_bias: bool = False, ): \"\"\"Constructor for FullyConnectedHead Args:", "same head might be attached to a model, and unique_id is used to", "under the MIT license found in the # LICENSE file in the root", "for the head. If None, then the fully connected layer is not applied.", "then the fully connected layer is not applied. in_plane: Input size for the", "Facebook, Inc. and its affiliates. # # This source code is licensed under", "1)) self.fc = None if num_classes is None else nn.Linear(in_plane, num_classes) if zero_init_bias:", "the head. Multiple instances of the same head might be attached to a", "config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), )", "affiliates. # # This source code is licensed under the MIT license found", "fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id: str, num_classes: int, in_plane:", "size for the fully connected layer. \"\"\" super().__init__(unique_id, num_classes) assert num_classes is None", "num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates", "# LICENSE file in the root directory of this source tree. from typing", "(:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id: str, num_classes: int, in_plane: int, zero_init_bias: bool", "tree. from typing import Any, Dict import torch.nn as nn from classy_vision.generic.util import", "num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x): # perform average pooling: out", "head defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected", "if num_classes is None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls,", "a FullyConnectedHead from a configuration. Args: config: A configuration for a FullyConnectedHead. See", "num_classes) assert num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1))", "pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__(", "to them. num_classes: Number of classes for the head. If None, then the", "from a configuration. Args: config: A configuration for a FullyConnectedHead. 
See :func:`__init__` for", "the MIT license found in the # LICENSE file in the root directory", "config: Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration. Args: config:", "for parameters expected in the config. Returns: A FullyConnectedHead instance. \"\"\" num_classes =", "if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a", "A configuration for a FullyConnectedHead. See :func:`__init__` for parameters expected in the config.", "of classes for the head. If None, then the fully connected layer is", "layer is not applied. in_plane: Input size for the fully connected layer. \"\"\"", "is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes is None else", "nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead):", "out = self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1) if self.fc is", "self.fc = None if num_classes is None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_()", "used to refer to them. num_classes: Number of classes for the head. If", "# final classifier: out = out.reshape(out.size(0), -1) if self.fc is not None: out", "config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x): # perform average pooling:", "head. If None, then the fully connected layer is not applied. in_plane: Input", "): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique identifier for the head. Multiple", "head might be attached to a model, and unique_id is used to refer", "configuration. Args: config: A configuration for a FullyConnectedHead. See :func:`__init__` for parameters expected", "code is licensed under the MIT license found in the # LICENSE file", "is licensed under the MIT license found in the # LICENSE file in", "= self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1) if self.fc is not", "unique_id is used to refer to them. num_classes: Number of classes for the", "def __init__( self, unique_id: str, num_classes: int, in_plane: int, zero_init_bias: bool = False,", "source code is licensed under the MIT license found in the # LICENSE", "from classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This", "for a FullyConnectedHead. See :func:`__init__` for parameters expected in the config. Returns: A", "head. Multiple instances of the same head might be attached to a model,", "Multiple instances of the same head might be attached to a model, and", "__init__( self, unique_id: str, num_classes: int, in_plane: int, zero_init_bias: bool = False, ):", "int, zero_init_bias: bool = False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique", "parameters expected in the config. Returns: A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\",", "Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration. Args: config: A configuration", "typing import Any, Dict import torch.nn as nn from classy_vision.generic.util import is_pos_int from", "a configuration. Args: config: A configuration for a FullyConnectedHead. 
See :func:`__init__` for parameters", "file in the root directory of this source tree. from typing import Any,", "of this source tree. from typing import Any, Dict import torch.nn as nn", "else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str, Any]) ->", "from typing import Any, Dict import torch.nn as nn from classy_vision.generic.util import is_pos_int", "zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x): # perform average pooling: out = self.avgpool(x)", "A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls(", "A unique identifier for the head. Multiple instances of the same head might", "Args: unique_id: A unique identifier for the head. Multiple instances of the same", "unique_id: str, num_classes: int, in_plane: int, zero_init_bias: bool = False, ): \"\"\"Constructor for", "is used to refer to them. num_classes: Number of classes for the head.", "a model, and unique_id is used to refer to them. num_classes: Number of", "import ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head defines a 2d average pooling", "zero_init_bias: bool = False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique identifier", "is_pos_int from classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head defines a", "the head. If None, then the fully connected layer is not applied. in_plane:", "@classmethod def from_config(cls, config: Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a", "= config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False),", "def forward(self, x): # perform average pooling: out = self.avgpool(x) # final classifier:", "Dict[str, Any]) -> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration. Args: config: A", "instance. \"\"\" num_classes = config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes,", "num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc =", "x): # perform average pooling: out = self.avgpool(x) # final classifier: out =", "to refer to them. num_classes: Number of classes for the head. If None,", "average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def", "pooling: out = self.avgpool(x) # final classifier: out = out.reshape(out.size(0), -1) if self.fc", "for the head. Multiple instances of the same head might be attached to", "config. Returns: A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\", None) in_plane = config[\"in_plane\"]", "import torch.nn as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead, register_head", "\"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique identifier for the head. 
Multiple instances", "# This source code is licensed under the MIT license found in the", "FullyConnectedHead(ClassyHead): \"\"\"This head defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a", "\"\"\" super().__init__(unique_id, num_classes) assert num_classes is None or is_pos_int(num_classes) assert is_pos_int(in_plane) self.avgpool =", "\"\"\"This head defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully", "in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self,", "not applied. in_plane: Input size for the fully connected layer. \"\"\" super().__init__(unique_id, num_classes)", "fully connected layer. \"\"\" super().__init__(unique_id, num_classes) assert num_classes is None or is_pos_int(num_classes) assert", "config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def forward(self, x): #", "is not applied. in_plane: Input size for the fully connected layer. \"\"\" super().__init__(unique_id,", "followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id: str,", "as nn from classy_vision.generic.util import is_pos_int from classy_vision.heads import ClassyHead, register_head @register_head(\"fully_connected\") class", "FullyConnectedHead Args: unique_id: A unique identifier for the head. Multiple instances of the", "None) in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane, zero_init_bias=config.get(\"zero_init_bias\", False), ) def", "(:class:`torch.nn.AdaptiveAvgPool2d`) followed by a fully connected layer (:class:`torch.nn.Linear`). \"\"\" def __init__( self, unique_id:", "None else nn.Linear(in_plane, num_classes) if zero_init_bias: self.fc.bias.data.zero_() @classmethod def from_config(cls, config: Dict[str, Any])", "Args: config: A configuration for a FullyConnectedHead. See :func:`__init__` for parameters expected in", "\"\"\" num_classes = config.get(\"num_classes\", None) in_plane = config[\"in_plane\"] return cls( config[\"unique_id\"], num_classes, in_plane,", "#!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. # # This", "@register_head(\"fully_connected\") class FullyConnectedHead(ClassyHead): \"\"\"This head defines a 2d average pooling layer (:class:`torch.nn.AdaptiveAvgPool2d`) followed", "-> \"FullyConnectedHead\": \"\"\"Instantiates a FullyConnectedHead from a configuration. Args: config: A configuration for", "bool = False, ): \"\"\"Constructor for FullyConnectedHead Args: unique_id: A unique identifier for", "expected in the config. Returns: A FullyConnectedHead instance. \"\"\" num_classes = config.get(\"num_classes\", None)", "self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) self.fc = None if num_classes is None else nn.Linear(in_plane," ]
[ "print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C: CUED Part IA Flood Warning", "= stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task", "if __name__ == \"__main__\": print(\"*** Task 2C: CUED Part IA Flood Warning System", "import stations_highest_rel_level def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in", "import build_station_list from floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list() warning_stations =", "= build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ ==", "stations_highest_rel_level def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations:", "def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1])", "stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__", "from floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for", "run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if", "from floodsystem.stationdata import build_station_list from floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list()", "__name__ == \"__main__\": print(\"*** Task 2C: CUED Part IA Flood Warning System ***\")", "warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C: CUED Part IA Flood", "entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C: CUED Part", "for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C: CUED", "stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C:", "build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\":", "in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"*** Task 2C: CUED Part IA", "== \"__main__\": print(\"*** Task 2C: CUED Part IA Flood Warning System ***\") run()", "build_station_list from floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10)", "warning_stations = stations_highest_rel_level(stations,10) for entry in warning_stations: print(entry[0].name,entry[1]) if __name__ == \"__main__\": print(\"***", "floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list() warning_stations = stations_highest_rel_level(stations,10) for entry", "floodsystem.stationdata import build_station_list from floodsystem.flood import stations_highest_rel_level def run(): stations = build_station_list() warning_stations" ]
[ "copied via the `__copy_create__()` method. All variables, that could not be set via", "here. This method must be overridden, if the constructor takes parameters. Returns -------", "__all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects,", "are then copied via `__copy_fill__()`, starting with the method in the uppermost base", "handled in `__copy_fill__()` Do not call the `super()` method here. This method must", "`super()` method here. This method must be overridden, if the constructor takes parameters.", "new object. Always call the `super()` method as first statement. Parameters ---------- clone", "the instance to be copied. This approach solves the problem of encapsulated variables", "necessary attributes to the new object. Always call the `super()` method as first", "the Biotite package and is distributed # under the 3-Clause BSD License. Please", "call the `super()` method here. This method must be overridden, if the constructor", "copied are handled in `__copy_fill__()` Do not call the `super()` method here. This", "abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that should be copyable.", "via the constructor, are then copied via `__copy_fill__()`, starting with the method in", "this class. Only the constructor should be called in this method. All further", "instance to be copied. This approach solves the problem of encapsulated variables in", "should be called in this method. All further attributes, that need to be", "\"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all", "to be copied. This approach solves the problem of encapsulated variables in superclasses.", "first statement. Parameters ---------- clone The freshly instantiated copy of *self*. \"\"\" pass", "must be overridden, if the constructor takes parameters. Returns ------- copy A freshly", "copy of this object. Returns ------- copy A copy of this object. \"\"\"", "code is part of the Biotite package and is distributed # under the", "All variables, that could not be set via the constructor, are then copied", "variables in superclasses. \"\"\" def copy(self): \"\"\" Create a deep copy of this", "A freshly instantiated copy of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\"", "of encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\" Create a deep copy", "under the 3-Clause BSD License. Please see 'LICENSE.rst' for further # information. __name__", "Base class for all objects, that should be copyable. The public method `copy()`", "not call the `super()` method here. This method must be overridden, if the", "for all objects, that should be copyable. The public method `copy()` first creates", "instance, that is copied via the `__copy_create__()` method. All variables, that could not", "should be copyable. The public method `copy()` first creates a fresh instance of", "source code is part of the Biotite package and is distributed # under", "that need to be copied are handled in `__copy_fill__()` Do not call the", "`__copy_fill__()` Do not call the `super()` method here. This method must be overridden,", "in the uppermost base class and ending with the class of the instance", "*self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes to", "public method `copy()` first creates a fresh instance of the class of the", "def __copy_create__(self): \"\"\" Instantiate a new object of this class. 
Only the constructor", "as first statement. Parameters ---------- clone The freshly instantiated copy of *self*. \"\"\"", "The public method `copy()` first creates a fresh instance of the class of", "be called in this method. All further attributes, that need to be copied", "is distributed # under the 3-Clause BSD License. Please see 'LICENSE.rst' for further", "be overridden, if the constructor takes parameters. Returns ------- copy A freshly instantiated", "approach solves the problem of encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\"", "3-Clause BSD License. Please see 'LICENSE.rst' for further # information. __name__ = \"biotite\"", "[\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that should", "def copy(self): \"\"\" Create a deep copy of this object. Returns ------- copy", "of the Biotite package and is distributed # under the 3-Clause BSD License.", "Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that should be copyable. The public", "encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\" Create a deep copy of", "copy A copy of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone", "return clone def __copy_create__(self): \"\"\" Instantiate a new object of this class. Only", "All further attributes, that need to be copied are handled in `__copy_fill__()` Do", "to be copied are handled in `__copy_fill__()` Do not call the `super()` method", "the `super()` method as first statement. Parameters ---------- clone The freshly instantiated copy", "new object of this class. Only the constructor should be called in this", "copy(self): \"\"\" Create a deep copy of this object. Returns ------- copy A", "\"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a new", "__name__ = \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta):", "the new object. Always call the `super()` method as first statement. Parameters ----------", "of this object. Returns ------- copy A copy of this object. \"\"\" clone", "the 3-Clause BSD License. Please see 'LICENSE.rst' for further # information. __name__ =", "superclasses. \"\"\" def copy(self): \"\"\" Create a deep copy of this object. Returns", "__copy_create__(self): \"\"\" Instantiate a new object of this class. Only the constructor should", "Biotite package and is distributed # under the 3-Clause BSD License. Please see", "and ending with the class of the instance to be copied. This approach", "class for all objects, that should be copyable. The public method `copy()` first", "first creates a fresh instance of the class of the instance, that is", "the instance, that is copied via the `__copy_create__()` method. All variables, that could", "this object. Returns ------- copy A copy of this object. \"\"\" clone =", "distributed # under the 3-Clause BSD License. Please see 'LICENSE.rst' for further #", "------- copy A freshly instantiated copy of *self*. \"\"\" return type(self)() def __copy_fill__(self,", "creates a fresh instance of the class of the instance, that is copied", "type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes to the new object.", "__author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class", "Create a deep copy of this object. 
Returns ------- copy A copy of", "= [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that", "\"\"\" Instantiate a new object of this class. Only the constructor should be", "\"\"\" Base class for all objects, that should be copyable. The public method", "attributes, that need to be copied are handled in `__copy_fill__()` Do not call", "BSD License. Please see 'LICENSE.rst' for further # information. __name__ = \"biotite\" __author__", "copy A freshly instantiated copy of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone):", "ending with the class of the instance to be copied. This approach solves", "of the instance to be copied. This approach solves the problem of encapsulated", "self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a new object of this", "are handled in `__copy_fill__()` Do not call the `super()` method here. This method", "fresh instance of the class of the instance, that is copied via the", "instance of the class of the instance, that is copied via the `__copy_create__()`", "= \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\"", "__copy_fill__(self, clone): \"\"\" Copy all necessary attributes to the new object. Always call", "of the instance, that is copied via the `__copy_create__()` method. All variables, that", "objects, that should be copyable. The public method `copy()` first creates a fresh", "clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a new object", "= self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a new object of", "base class and ending with the class of the instance to be copied.", "of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes", "method in the uppermost base class and ending with the class of the", "# information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc", "Returns ------- copy A copy of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone)", "need to be copied are handled in `__copy_fill__()` Do not call the `super()`", "`super()` method as first statement. Parameters ---------- clone The freshly instantiated copy of", "class and ending with the class of the instance to be copied. This", "method must be overridden, if the constructor takes parameters. Returns ------- copy A", "be copied. This approach solves the problem of encapsulated variables in superclasses. \"\"\"", "not be set via the constructor, are then copied via `__copy_fill__()`, starting with", "object of this class. Only the constructor should be called in this method.", "variables, that could not be set via the constructor, are then copied via", "freshly instantiated copy of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy", "`__copy_fill__()`, starting with the method in the uppermost base class and ending with", "starting with the method in the uppermost base class and ending with the", "This source code is part of the Biotite package and is distributed #", "instantiated copy of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all", "that could not be set via the constructor, are then copied via `__copy_fill__()`,", "in `__copy_fill__()` Do not call the `super()` method here. 
This method must be", "\"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes to the", "the uppermost base class and ending with the class of the instance to", "takes parameters. Returns ------- copy A freshly instantiated copy of *self*. \"\"\" return", "Instantiate a new object of this class. Only the constructor should be called", "call the `super()` method as first statement. Parameters ---------- clone The freshly instantiated", "this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate", "the class of the instance to be copied. This approach solves the problem", "method. All variables, that could not be set via the constructor, are then", "Please see 'LICENSE.rst' for further # information. __name__ = \"biotite\" __author__ = \"<NAME>\"", "A copy of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def", "the `__copy_create__()` method. All variables, that could not be set via the constructor,", "via the `__copy_create__()` method. All variables, that could not be set via the", "self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a new object of this class.", "\"\"\" Copy all necessary attributes to the new object. Always call the `super()`", "of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\"", "return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes to the new", "copied via `__copy_fill__()`, starting with the method in the uppermost base class and", "the method in the uppermost base class and ending with the class of", "a deep copy of this object. Returns ------- copy A copy of this", "further attributes, that need to be copied are handled in `__copy_fill__()` Do not", "class. Only the constructor should be called in this method. All further attributes,", "in this method. All further attributes, that need to be copied are handled", "all necessary attributes to the new object. Always call the `super()` method as", "deep copy of this object. Returns ------- copy A copy of this object.", "with the class of the instance to be copied. This approach solves the", "License. Please see 'LICENSE.rst' for further # information. __name__ = \"biotite\" __author__ =", "the `super()` method here. This method must be overridden, if the constructor takes", "part of the Biotite package and is distributed # under the 3-Clause BSD", "if the constructor takes parameters. Returns ------- copy A freshly instantiated copy of", "Copy all necessary attributes to the new object. Always call the `super()` method", "constructor takes parameters. Returns ------- copy A freshly instantiated copy of *self*. \"\"\"", "copy of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self):", "def __copy_fill__(self, clone): \"\"\" Copy all necessary attributes to the new object. Always", "constructor, are then copied via `__copy_fill__()`, starting with the method in the uppermost", "called in this method. All further attributes, that need to be copied are", "class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that should be copyable. The", "This approach solves the problem of encapsulated variables in superclasses. 
\"\"\" def copy(self):", "the constructor, are then copied via `__copy_fill__()`, starting with the method in the", "of the class of the instance, that is copied via the `__copy_create__()` method.", "attributes to the new object. Always call the `super()` method as first statement.", "`__copy_create__()` method. All variables, that could not be set via the constructor, are", "this method. All further attributes, that need to be copied are handled in", "object. Returns ------- copy A copy of this object. \"\"\" clone = self.__copy_create__()", "to the new object. Always call the `super()` method as first statement. Parameters", "copyable. The public method `copy()` first creates a fresh instance of the class", "method `copy()` first creates a fresh instance of the class of the instance,", "------- copy A copy of this object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return", "and is distributed # under the 3-Clause BSD License. Please see 'LICENSE.rst' for", "'LICENSE.rst' for further # information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__ =", "object. Always call the `super()` method as first statement. Parameters ---------- clone The", "is part of the Biotite package and is distributed # under the 3-Clause", "method as first statement. Parameters ---------- clone The freshly instantiated copy of *self*.", "# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further # information.", "a fresh instance of the class of the instance, that is copied via", "clone): \"\"\" Copy all necessary attributes to the new object. Always call the", "see 'LICENSE.rst' for further # information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__", "Do not call the `super()` method here. This method must be overridden, if", "solves the problem of encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\" Create", "This method must be overridden, if the constructor takes parameters. Returns ------- copy", "set via the constructor, are then copied via `__copy_fill__()`, starting with the method", "class of the instance, that is copied via the `__copy_create__()` method. All variables,", "`copy()` first creates a fresh instance of the class of the instance, that", "then copied via `__copy_fill__()`, starting with the method in the uppermost base class", "be copied are handled in `__copy_fill__()` Do not call the `super()` method here.", "Only the constructor should be called in this method. All further attributes, that", "that should be copyable. The public method `copy()` first creates a fresh instance", "a new object of this class. Only the constructor should be called in", "\"\"\" def copy(self): \"\"\" Create a deep copy of this object. Returns -------", "further # information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import", "# This source code is part of the Biotite package and is distributed", "that is copied via the `__copy_create__()` method. All variables, that could not be", "package and is distributed # under the 3-Clause BSD License. Please see 'LICENSE.rst'", "uppermost base class and ending with the class of the instance to be", "copied. This approach solves the problem of encapsulated variables in superclasses. \"\"\" def", "Always call the `super()` method as first statement. Parameters ---------- clone The freshly", "parameters. Returns ------- copy A freshly instantiated copy of *self*. \"\"\" return type(self)()", "the class of the instance, that is copied via the `__copy_create__()` method. 
All", "method here. This method must be overridden, if the constructor takes parameters. Returns", "in superclasses. \"\"\" def copy(self): \"\"\" Create a deep copy of this object.", "\"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base", "via `__copy_fill__()`, starting with the method in the uppermost base class and ending", "information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"] import abc class", "could not be set via the constructor, are then copied via `__copy_fill__()`, starting", "with the method in the uppermost base class and ending with the class", "overridden, if the constructor takes parameters. Returns ------- copy A freshly instantiated copy", "be copyable. The public method `copy()` first creates a fresh instance of the", "method. All further attributes, that need to be copied are handled in `__copy_fill__()`", "= \"<NAME>\" __all__ = [\"Copyable\"] import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for", "be set via the constructor, are then copied via `__copy_fill__()`, starting with the", "the problem of encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\" Create a", "<filename>src/biotite/copyable.py # This source code is part of the Biotite package and is", "problem of encapsulated variables in superclasses. \"\"\" def copy(self): \"\"\" Create a deep", "object. \"\"\" clone = self.__copy_create__() self.__copy_fill__(clone) return clone def __copy_create__(self): \"\"\" Instantiate a", "constructor should be called in this method. All further attributes, that need to", "the constructor takes parameters. Returns ------- copy A freshly instantiated copy of *self*.", "the constructor should be called in this method. All further attributes, that need", "Returns ------- copy A freshly instantiated copy of *self*. \"\"\" return type(self)() def", "\"\"\" Create a deep copy of this object. Returns ------- copy A copy", "import abc class Copyable(metaclass=abc.ABCMeta): \"\"\" Base class for all objects, that should be", "all objects, that should be copyable. The public method `copy()` first creates a", "class of the instance to be copied. This approach solves the problem of", "copy of *self*. \"\"\" return type(self)() def __copy_fill__(self, clone): \"\"\" Copy all necessary", "of this class. Only the constructor should be called in this method. All", "for further # information. __name__ = \"biotite\" __author__ = \"<NAME>\" __all__ = [\"Copyable\"]", "clone def __copy_create__(self): \"\"\" Instantiate a new object of this class. Only the", "is copied via the `__copy_create__()` method. All variables, that could not be set" ]
[ "} @property def available(self) -> bool: return self._available @property def name(self): \"\"\"Return the", "import List from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry", "= self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id, value in device_info:", "homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from wyzeapy.base_client", "Client from wyzeapy.types import PropertyIDs from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION", "async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi binary sensor component\"\"\") client: Client", "binary sensor component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return", "client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return client.get_cameras() except AccessTokenError", "camera) for camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available:", "return self._available @property def name(self): \"\"\"Return the display name of this switch.\"\"\" return", "display name of this switch.\"\"\" return self._device.nickname @property def is_on(self): \"\"\"Return true if", "try: device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id, value", "homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from", "True self._last_event = latest_event.event_ts else: self._on = False self._last_event = latest_event.event_ts else: self._on", "except AccessTokenError: self._client.reauthenticate() device_info = self._client.get_info(self._device) for property_id, value in device_info: if property_id", ") from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant", "def name(self): \"\"\"Return the display name of this switch.\"\"\" return self._device.nickname @property def", "def is_on(self): \"\"\"Return true if switch is on.\"\"\" return self._on @property def unique_id(self):", "return self._on @property def unique_id(self): return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device attributes", "homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import", "self._last_event = int(str(int(time.time())) + \"000\") @property def device_info(self): return { \"identifiers\": { (DOMAIN,", "property_id, value in device_info: if property_id == PropertyIDs.AVAILABLE: self._available = True if value", "def device_info(self): return { \"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\",", "property_id == PropertyIDs.AVAILABLE: self._available = True if value == \"1\" else False latest_event", "\"available\": self.available, \"device model\": self._device.product_model, \"mac\": 
self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION", "= hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return client.get_cameras() except AccessTokenError as e:", "@property def device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device) except AccessTokenError:", "model\": self._device.product_model, \"mac\": self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION def update(self): try:", "\"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device attributes of the entity.\"\"\" return { ATTR_ATTRIBUTION:", "= latest_event.event_ts else: self._on = False self._last_event = latest_event.event_ts else: self._on = False", "device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate() device_info", "HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client import Client from wyzeapy.types import", "wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client import Client from wyzeapy.types import PropertyIDs from", "Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try: return client.get_cameras() except AccessTokenError as", "@property def device_info(self): return { \"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\":", "self.available, \"device model\": self._device.product_model, \"mac\": self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION def", "else False latest_event = self._client.get_latest_event(self._device) if latest_event is not None: if latest_event.event_ts >", "SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi", "= self._client.get_info(self._device) for property_id, value in device_info: if property_id == PropertyIDs.AVAILABLE: self._available =", "\"device model\": self._device.product_model, \"mac\": self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION def update(self):", "value in device_info: if property_id == PropertyIDs.AVAILABLE: self._available = True if value ==", "import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client", "entity.\"\"\" return { ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\": self.available, \"device model\": self._device.product_model, \"mac\":", "_LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras = [WyzeCameraMotion(client, camera) for camera in await hass.async_add_executor_job(get_cameras)]", ".const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided by Wyze\" SCAN_INTERVAL", "(DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model } @property def available(self)", "hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool def __init__(self, wyzeapi_client: Client,", "from homeassistant.core import HomeAssistant from wyzeapy.base_client import Device, AccessTokenError from wyzeapy.client import Client", "if latest_event is not None: if 
latest_event.event_ts > self._last_event: self._on = True self._last_event", "\"000\") @property def device_info(self): return { \"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name,", "import PropertyIDs from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided", "__init__(self, wyzeapi_client: Client, device: Device): self._client = wyzeapi_client self._device = device self._last_event =", "\"WyzeLabs\", \"model\": self._device.product_model } @property def available(self) -> bool: return self._available @property def", "value == \"1\" else False latest_event = self._client.get_latest_event(self._device) if latest_event is not None:", "self._device = device self._last_event = int(str(int(time.time())) + \"000\") @property def device_info(self): return {", "{ ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\": self.available, \"device model\": self._device.product_model, \"mac\": self.unique_id }", "}, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model } @property def available(self) -> bool:", "from typing import List from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries", "\"state\": self.is_on, \"available\": self.available, \"device model\": self._device.product_model, \"mac\": self.unique_id } @property def device_class(self):", "from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided by Wyze\"", "\"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model } @property", "datetime import timedelta from typing import List from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION", "def unique_id(self): return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device attributes of the entity.\"\"\"", "time from datetime import timedelta from typing import List from homeassistant.components.binary_sensor import (", "if value == \"1\" else False latest_event = self._client.get_latest_event(self._device) if latest_event is not", "{ \"identifiers\": { (DOMAIN, self._device.mac) }, \"name\": self.name, \"manufacturer\": \"WyzeLabs\", \"model\": self._device.product_model }", "from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from", "is not None: if latest_event.event_ts > self._last_event: self._on = True self._last_event = latest_event.event_ts", "in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras, True) class WyzeCameraMotion(BinarySensorEntity): _on: bool _available: bool def __init__(self,", "= True self._last_event = latest_event.event_ts else: self._on = False self._last_event = latest_event.event_ts else:", "from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION ) from homeassistant.config_entries import ConfigEntry from homeassistant.const", "bool _available: bool def __init__(self, wyzeapi_client: Client, device: Device): self._client = wyzeapi_client self._device", "\"mac\": self.unique_id } @property def device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info =", "device_info: if property_id == PropertyIDs.AVAILABLE: self._available = True if value == \"1\" else", "self._last_event: self._on = True self._last_event = 
latest_event.event_ts else: self._on = False self._last_event =", "ConfigEntry from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from wyzeapy.base_client import Device,", "by Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities): _LOGGER.debug(\"\"\"Creating", "PropertyIDs from .const import DOMAIN _LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided by", "} @property def device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device) except", "_available: bool def __init__(self, wyzeapi_client: Client, device: Device): self._client = wyzeapi_client self._device =", "def device_class(self): return DEVICE_CLASS_MOTION def update(self): try: device_info = self._client.get_info(self._device) except AccessTokenError: self._client.reauthenticate()", "True if value == \"1\" else False latest_event = self._client.get_latest_event(self._device) if latest_event is", "= wyzeapi_client self._device = device self._last_event = int(str(int(time.time())) + \"000\") @property def device_info(self):", "import timedelta from typing import List from homeassistant.components.binary_sensor import ( BinarySensorEntity, DEVICE_CLASS_MOTION )", "PropertyIDs.AVAILABLE: self._available = True if value == \"1\" else False latest_event = self._client.get_latest_event(self._device)", "logging.getLogger(__name__) ATTRIBUTION = \"Data provided by Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async def async_setup_entry(hass:", "device attributes of the entity.\"\"\" return { ATTR_ATTRIBUTION: ATTRIBUTION, \"state\": self.is_on, \"available\": self.available,", "Device, AccessTokenError from wyzeapy.client import Client from wyzeapy.types import PropertyIDs from .const import", "async_add_entities): _LOGGER.debug(\"\"\"Creating new WyzeApi binary sensor component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras()", "unique_id(self): return \"{}-motion\".format(self._device.mac) @property def device_state_attributes(self): \"\"\"Return device attributes of the entity.\"\"\" return", "int(str(int(time.time())) + \"000\") @property def device_info(self): return { \"identifiers\": { (DOMAIN, self._device.mac) },", "of this switch.\"\"\" return self._device.nickname @property def is_on(self): \"\"\"Return true if switch is", "from datetime import timedelta from typing import List from homeassistant.components.binary_sensor import ( BinarySensorEntity,", "this switch.\"\"\" return self._device.nickname @property def is_on(self): \"\"\"Return true if switch is on.\"\"\"", "self._available @property def name(self): \"\"\"Return the display name of this switch.\"\"\" return self._device.nickname", "_LOGGER = logging.getLogger(__name__) ATTRIBUTION = \"Data provided by Wyze\" SCAN_INTERVAL = timedelta(seconds=10) async", "in device_info: if property_id == PropertyIDs.AVAILABLE: self._available = True if value == \"1\"", "client.reauthenticate() return client.get_cameras() cameras = [WyzeCameraMotion(client, camera) for camera in await hass.async_add_executor_job(get_cameras)] async_add_entities(cameras,", "WyzeApi binary sensor component\"\"\") client: Client = hass.data[DOMAIN][config_entry.entry_id] def get_cameras() -> List[Device]: try:", "return client.get_cameras() except AccessTokenError as e: _LOGGER.warning(e) client.reauthenticate() return client.get_cameras() cameras = 
import logging
import time
from datetime import timedelta
from typing import List

from homeassistant.components.binary_sensor import (
    BinarySensorEntity,
    DEVICE_CLASS_MOTION
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.core import HomeAssistant
from wyzeapy.base_client import Device, AccessTokenError
from wyzeapy.client import Client
from wyzeapy.types import PropertyIDs

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Wyze"
SCAN_INTERVAL = timedelta(seconds=10)


async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities):
    _LOGGER.debug("""Creating new WyzeApi binary sensor component""")
    client: Client = hass.data[DOMAIN][config_entry.entry_id]

    def get_cameras() -> List[Device]:
        try:
            return client.get_cameras()
        except AccessTokenError as e:
            _LOGGER.warning(e)
            client.reauthenticate()
            return client.get_cameras()

    cameras = [WyzeCameraMotion(client, camera) for camera in
               await hass.async_add_executor_job(get_cameras)]
    async_add_entities(cameras, True)


class WyzeCameraMotion(BinarySensorEntity):
    _on: bool
    _available: bool

    def __init__(self, wyzeapi_client: Client, device: Device):
        self._client = wyzeapi_client
        self._device = device
        self._last_event = int(str(int(time.time())) + "000")

    @property
    def device_info(self):
        return {
            "identifiers": {
                (DOMAIN, self._device.mac)
            },
            "name": self.name,
            "manufacturer": "WyzeLabs",
            "model": self._device.product_model
        }

    @property
    def available(self) -> bool:
        return self._available

    @property
    def name(self):
        """Return the display name of this switch."""
        return self._device.nickname

    @property
    def is_on(self):
        """Return true if switch is on."""
        return self._on

    @property
    def unique_id(self):
        return "{}-motion".format(self._device.mac)

    @property
    def device_state_attributes(self):
        """Return device attributes of the entity."""
        return {
            ATTR_ATTRIBUTION: ATTRIBUTION,
            "state": self.is_on,
            "available": self.available,
            "device model": self._device.product_model,
            "mac": self.unique_id
        }

    @property
    def device_class(self):
        return DEVICE_CLASS_MOTION

    def update(self):
        try:
            device_info = self._client.get_info(self._device)
        except AccessTokenError:
            self._client.reauthenticate()
            device_info = self._client.get_info(self._device)

        for property_id, value in device_info:
            if property_id == PropertyIDs.AVAILABLE:
                self._available = True if value == "1" else False

        latest_event = self._client.get_latest_event(self._device)
        if latest_event is not None:
            if latest_event.event_ts > self._last_event:
                self._on = True
                self._last_event = latest_event.event_ts
            else:
                self._on = False
                self._last_event = latest_event.event_ts
        else:
            self._on = False
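# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the component above): WyzeCameraMotion
# seeds self._last_event with int(str(int(time.time())) + "000"), i.e. the
# current epoch time in milliseconds, and update() reports motion whenever a
# Wyze event carries a newer millisecond timestamp.  The names below
# (epoch_ms, baseline, event_ts, is_on) are hypothetical stand-ins that show
# the string trick is just whole seconds * 1000 and how the comparison drives
# the on/off state.
# ---------------------------------------------------------------------------
import time


def epoch_ms(epoch_seconds):
    """Whole seconds scaled to the millisecond epoch used by Wyze events."""
    return int(epoch_seconds) * 1000


now = time.time()
assert epoch_ms(now) == int(str(int(now)) + "000")

baseline = epoch_ms(now)              # value stored by __init__ at start-up
event_ts = baseline + 5000            # hypothetical event arriving 5 s later
is_on = event_ts > baseline           # True -> the motion sensor turns on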
# src/Components/missions/GEMS/mcd43c.py
"""
Reads climate modeling grid 0.05 degree MCD43 BRDF files.
"""

import os
import sys

import numpy as np
from numpy import loadtxt, array, tile, where, concatenate, flipud
from numpy import ones
from datetime import date, datetime, timedelta
from glob import glob
from pyhdf.SD import SD, HDF4Error

MISSING = 32.767

SDS = dict(
    LAND=('BRDF_Albedo_Parameter1_Band1', 'BRDF_Albedo_Parameter1_Band2',
          'BRDF_Albedo_Parameter1_Band3', 'BRDF_Albedo_Parameter1_Band4',
          'BRDF_Albedo_Parameter1_Band5', 'BRDF_Albedo_Parameter1_Band6',
          'BRDF_Albedo_Parameter1_Band7',
          'BRDF_Albedo_Parameter2_Band1', 'BRDF_Albedo_Parameter2_Band2',
          'BRDF_Albedo_Parameter2_Band3', 'BRDF_Albedo_Parameter2_Band4',
          'BRDF_Albedo_Parameter2_Band5', 'BRDF_Albedo_Parameter2_Band6',
          'BRDF_Albedo_Parameter2_Band7',
          'BRDF_Albedo_Parameter3_Band1', 'BRDF_Albedo_Parameter3_Band2',
          'BRDF_Albedo_Parameter3_Band3', 'BRDF_Albedo_Parameter3_Band4',
          'BRDF_Albedo_Parameter3_Band5', 'BRDF_Albedo_Parameter3_Band6',
          'BRDF_Albedo_Parameter3_Band7'),
    QUAL=('BRDF_Albedo_Quality',
          'Snow_BRDF_Albedo',
          'BRDF_Albedo_Ancillary',
          ),
)

ALIAS = dict(
    BRDF_Albedo_Parameter1_Band1='KISO_b1_645',
    BRDF_Albedo_Parameter1_Band2='KISO_b2_856',
    BRDF_Albedo_Parameter1_Band3='KISO_b3_465',
    BRDF_Albedo_Parameter1_Band4='KISO_b4_553',
    BRDF_Albedo_Parameter1_Band5='KISO_b5_1241',
    BRDF_Albedo_Parameter1_Band6='KISO_b6_1629',
    BRDF_Albedo_Parameter1_Band7='KISO_b7_2114',
    BRDF_Albedo_Parameter2_Band1='KVOL_b1_645',
    BRDF_Albedo_Parameter2_Band2='KVOL_b2_856',
    BRDF_Albedo_Parameter2_Band3='KVOL_b3_465',
    BRDF_Albedo_Parameter2_Band4='KVOL_b4_553',
    BRDF_Albedo_Parameter2_Band5='KVOL_b5_1241',
    BRDF_Albedo_Parameter2_Band6='KVOL_b6_1629',
    BRDF_Albedo_Parameter2_Band7='KVOL_b7_2114',
    BRDF_Albedo_Parameter3_Band1='KGEO_b1_645',
    BRDF_Albedo_Parameter3_Band2='KGEO_b2_856',
    BRDF_Albedo_Parameter3_Band3='KGEO_b3_465',
    BRDF_Albedo_Parameter3_Band4='KGEO_b4_553',
    BRDF_Albedo_Parameter3_Band5='KGEO_b5_1241',
    BRDF_Albedo_Parameter3_Band6='KGEO_b6_1629',
    BRDF_Albedo_Parameter3_Band7='KGEO_b7_2114',
)

#...........................................................................

class McD43C(object):
    """
    This class implements the MODIS LAND BRDF 16-day Level 3 products,
    MCD43C1 (0.05 degree horz res).
    """

    def __init__(self, Path, lon, lat, Verb=1):
        """
        Reads files for one day of Level 3 MCD43C1 present on a given
        *Path* and returns an object with all 3 kernel coefficients.

        On input,

        Required parameters:
          Path -- for now a single file.  Eventually implement a single
                  directory, or a list of files and directories.
        """
        if type(lon) is list:
            lon = array(lon)
            lat = array(lat)

        # List of HDF files for a given date
        # ----------------------------------
        self.verb = Verb
        self.SDS = SDS['LAND']
        # self.Tfiles = glob(Path + '*.hdf')
        if type(Path) is str:
            self.Files = [Path]
        else:
            self.Files = Path

        # From a list of lat and lon, return the
        # dx, dy on the grid
        # -------------------------------------
        self.nobs = len(lon)
        self._findNearest(Path, lon, lat)

        # Read BRDF kernel in a MODIS tile
        # --------------------------------
        self.read_BRDF()  # Result

    #---
    def _findNearest(self, path, lon, lat):
        """Given a list of lat, lon, return numbers to find the position
        of the nearest neighbor on the grid (dx, dy)."""
        dLon = 0.05
        dLat = 0.05
        Lon0 = -180 - dLon
        Lat0 = -90 + dLat
        self.dx = (0.5 + (lon - Lon0) / dLon).astype(int)
        self.dy = (0.5 + (lat - Lat0) / dLat).astype(int)
        if self.verb:
            print 'dx', 'dy', self.dx, self.dy

    #---
    def read_BRDF(self):
        """Reads MCD43C1 file with Level 3 BRDF kernels for each MODIS band."""

        # Create empty lists for SDS to be read from file
        # -----------------------------------------------
        for name in self.SDS:
            self.__dict__[name] = []

        BRDF = MISSING * ones((len(self.SDS), self.nobs))

        for fn in self.Files:
            try:
                if self.verb:
                    print "[] Working on " + fn
                hfile = SD(fn)
            except HDF4Error:
                if self.verb > 2:
                    print "- %s: not recognized as an HDF file" % fn
                return

            # Read select variables (reshape to allow concatenation later)
            # ------------------------------------------------------------
            for sds in self.SDS:
                if self.verb:
                    print 'sds', self.SDS.index(sds)
                v = hfile.select(sds).get()
                a = hfile.select(sds).attributes()
                if a['scale_factor'] != 1.0 or a['add_offset'] != 0.0:
                    v = a['scale_factor'] * v + a['add_offset']
                if self.verb:
                    print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds), :], v.shape
                v = flipud(v)
                BRDF[self.SDS.index(sds), :] = v[array(self.dy), array(self.dx)]

        for sds in self.SDS:
            self.__dict__[sds] = BRDF[self.SDS.index(sds), :]
            if sds in ALIAS.keys():
                self.__dict__[ALIAS[sds]] = self.__dict__[sds]

    #---

#............................................................................

if __name__ == "__main__":

    path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf'
    lon = [-2., -120., 15.2, 17.2, 170.1]
    lat = [88., 40., -20., -20., -55.5]
    lon = np.arange(-180, 180, 1)
    lat = np.arange(-90, 90, 1)
    lon, lat = np.meshgrid(lon, lat)
    ex = McD43C(path, lon, lat)  # assumed instantiation; the original line is truncated after "ex ="
On input, Required parameters: Path", "Level 3 MCD43C1 present on a given *Path* and returns an object with", "from pyhdf.SD import SD, HDF4Error MISSING = 32.767 SDS = dict ( LAND", "= Path # From a list of lat and lon, return the #", "SDS to be read from file # ----------------------------------------------- for name in self.SDS: self.__dict__[name]", "if self.verb: print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads MCD43C1 file with Level", "self.Files = Path # From a list of lat and lon, return the", "is str: self.Files = [Path] else: self.Files = Path # From a list", "'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo',", "= self.__dict__[sds] #--- #............................................................................ if __name__ == \"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon =", "allow concatenation later) # ------------------------------------------------------------ for sds in self.SDS: if self.verb: print 'sds',self.SDS.index(sds)", "'BRDF_Albedo_Ancillary', ) ) ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856',", "'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #........................................................................... class McD43C(object): \"\"\" This", "print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy), array(self.dx)] for", "self._findNearest(Path,lon,lat) # Read BRDF kernel in a MODIS tile # --------------------------------- self.read_BRDF() #", "in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ if __name__ == \"__main__\": path =", "select variables (reshape to allow concatenation later) # ------------------------------------------------------------ for sds in self.SDS:", "if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ if __name__ == \"__main__\":", "QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 =", "('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2", "hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] * v + a['add_offset'] if", "sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ 
if __name__ == \"__main__\": path", "if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] * v + a['add_offset'] if self.verb:", "is list: lon = array(lon) lat = array(lat) # List of HDF files", "BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3", "\"\"\"Given a list of lat, lon, return numbers to find the position of", "16-day Level 3 products, MCD43C1 (0.05 degree horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1):", "MCD43C1 present on a given *Path* and returns an object with all 3", "if type(lon) is list: lon = array(lon) lat = array(lat) # List of", "for each MODIS band.\"\"\" # Create empty lists for SDS to be read", "degree horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for one day", "try: if self.verb: print \"[] Working on \"+fn hfile = SD(fn) except HDF4Error:", "__name__ == \"__main__\": path = '/nobackup/3/pcastell/MODIS/MCD43C1/MCD43C1.A2005361.005.2008094071946.hdf' lon = [-2.,-120.,15.2,17.2,170.1] lat = [88.,40.,-20.,-20.,-55.5] lon", "import ones from datetime import date, datetime, timedelta from glob import glob from", "for now a single file. Eventually implement a single directory, or a list", "32.767 SDS = dict ( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4',", "dLon Lat0 = -90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if", "(0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads", "given *Path* and returns an object with all 3 kernels coeff. 
On input,", "'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4', 'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', )", "MISSING * ones((len(self.SDS),self.nobs)) for fn in self.Files: try: if self.verb: print \"[] Working", "if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:] = v[array(self.dy),", "horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for one day of", "# ------------------------------------- self.nobs = len(lon) self._findNearest(Path,lon,lat) # Read BRDF kernel in a MODIS", "3 MCD43C1 present on a given *Path* and returns an object with all", "'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465',", "MCD43C1 file with Level 3 BRDF kernels for each MODIS band.\"\"\" # Create", "ones((len(self.SDS),self.nobs)) for fn in self.Files: try: if self.verb: print \"[] Working on \"+fn", "self.SDS: if self.verb: print 'sds',self.SDS.index(sds) v = hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0", "Working on \"+fn hfile = SD(fn) except HDF4Error: if self.verb > 2: print", "on the grid (dx,dy) \"\"\" dLon = 0.05 dLat = 0.05 Lon0 =", "sys from numpy import loadtxt, array, tile, where, concatenate, flipud from numpy import", "nearest neighbor on the grid (dx,dy) \"\"\" dLon = 0.05 dLat = 0.05", "-- for now a single file. Eventually implement a single directory, or a", "one day of Level 3 MCD43C1 present on a given *Path* and returns", "now a single file. Eventually implement a single directory, or a list of", "Create empty lists for SDS to be read from file # ----------------------------------------------- for", "%s: not recognized as an HDF file\"%filename return # Read select variables (reshape", "print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads MCD43C1 file with Level 3 BRDF", "concatenation later) # ------------------------------------------------------------ for sds in self.SDS: if self.verb: print 'sds',self.SDS.index(sds) v", "= BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................ 
if __name__", "name in self.SDS: self.__dict__[name] = [] BRDF = MISSING * ones((len(self.SDS),self.nobs)) for fn", "v = hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor']", "BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6", "Read BRDF kernel in a MODIS tile # --------------------------------- self.read_BRDF() # Result #---", "'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856', BRDF_Albedo_Parameter3_Band3 = 'KGEO_b3_465',", ") #........................................................................... class McD43C(object): \"\"\" This class implements the MODIS LAND BRDF 16-day", "read_BRDF(self): \"\"\"Reads MCD43C1 file with Level 3 BRDF kernels for each MODIS band.\"\"\"", "sds in self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds]", "# List of HDF files for a given date #----------------------------------- self.verb = Verb", "This class implements the MODIS LAND BRDF 16-day Level 3 products, MCD43C1 (0.05", "file with Level 3 BRDF kernels for each MODIS band.\"\"\" # Create empty", "= hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] * v + a['add_offset']", "input, Required parameters: Path -- for now a single file. Eventually implement a", "= [] BRDF = MISSING * ones((len(self.SDS),self.nobs)) for fn in self.Files: try: if", "'dx','dy', self.dx,self.dy #--- def read_BRDF(self): \"\"\"Reads MCD43C1 file with Level 3 BRDF kernels", "= 'KGEO_b7_2114', ) #........................................................................... class McD43C(object): \"\"\" This class implements the MODIS LAND", "ones from datetime import date, datetime, timedelta from glob import glob from pyhdf.SD", "[88.,40.,-20.,-20.,-55.5] lon = np.arange(-180,180,1) lat = np.arange(-90,90,1) lon,lat = np.meshgrid(lon,lat) ex = McD43C(path,lon.flatten(),lat.flatte())", "self.SDS: self.__dict__[sds] = BRDF[self.SDS.index(sds),:] if sds in ALIAS.keys(): self.__dict__[ALIAS[sds]] = self.__dict__[sds] #--- #............................................................................", "\"\"\" dLon = 0.05 dLat = 0.05 Lon0 = -180 - dLon Lat0", "#----------------------------------- self.verb = Verb self.SDS = SDS['LAND'] #self.Tfiles = glob(Path + '*.hdf') if", "BRDF kernel in a MODIS tile # --------------------------------- self.read_BRDF() # Result #--- def", "def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for one day of Level 3 MCD43C1", "= a['scale_factor'] * v + a['add_offset'] if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape", "except HDF4Error: if self.verb > 2: print \"- %s: not recognized as an", "return numbers to find the position of the nearest neighbor on the grid", "object with all 3 kernels coeff. 
On input, Required parameters: Path -- for", "lon, return the # dx, dy on the grid # ------------------------------------- self.nobs =", "= (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb: print 'dx','dy', self.dx,self.dy #--- def read_BRDF(self):", "a list of lat, lon, return numbers to find the position of the", "timedelta from glob import glob from pyhdf.SD import SD, HDF4Error MISSING = 32.767", "Eventually implement a single directory, or a list of files and directories. \"\"\"", "Lon0 = -180 - dLon Lat0 = -90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int)", "+ '*.hdf') if type(Path) is str: self.Files = [Path] else: self.Files = Path", "= 'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 =", "neighbor on the grid (dx,dy) \"\"\" dLon = 0.05 dLat = 0.05 Lon0", "'BRDF_Albedo_Parameter3_Band5','BRDF_Albedo_Parameter3_Band6', 'BRDF_Albedo_Parameter3_Band7'), QUAL = ('BRDF_Albedo_Quality', 'Snow_BRDF_Albedo', 'BRDF_Albedo_Ancillary', ) ) ALIAS = dict (", "list of lat, lon, return numbers to find the position of the nearest", "lat and lon, return the # dx, dy on the grid # -------------------------------------", "a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v = a['scale_factor'] * v +", "print 'sds',self.SDS.index(sds) v = hfile.select(sds).get() a = hfile.select(sds).attributes() if a['scale_factor']!=1.0 or a['add_offset']!=0.0: v", "'KVOL_b5_1241', BRDF_Albedo_Parameter2_Band6 = 'KVOL_b6_1629', BRDF_Albedo_Parameter2_Band7 = 'KVOL_b7_2114', BRDF_Albedo_Parameter3_Band1 = 'KGEO_b1_645', BRDF_Albedo_Parameter3_Band2 = 'KGEO_b2_856',", "grid 0.05 degree MCD43 BRDF files. \"\"\" import os import sys from numpy", "files and directories. 
\"\"\" if type(lon) is list: lon = array(lon) lat =", "from numpy import loadtxt, array, tile, where, concatenate, flipud from numpy import ones", "where, concatenate, flipud from numpy import ones from datetime import date, datetime, timedelta", "+ a['add_offset'] if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v = flipud(v) BRDF[self.SDS.index(sds),:]", "BRDF_Albedo_Parameter1_Band6 = 'KISO_b6_1629', BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3", "def read_BRDF(self): \"\"\"Reads MCD43C1 file with Level 3 BRDF kernels for each MODIS", "Level 3 products, MCD43C1 (0.05 degree horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\"", "Result #--- def _findNearest(self,path,lon,lat): \"\"\"Given a list of lat, lon, return numbers to", "if self.verb > 2: print \"- %s: not recognized as an HDF file\"%filename", "ALIAS = dict ( BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465',", "BRDF_Albedo_Parameter1_Band7 = 'KISO_b7_2114', BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4", "kernels for each MODIS band.\"\"\" # Create empty lists for SDS to be", "as an HDF file\"%filename return # Read select variables (reshape to allow concatenation", "'KGEO_b3_465', BRDF_Albedo_Parameter3_Band4 = 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114',", "(0.05 degree horz res), \"\"\" def __init__ (self,Path,lon,lat,Verb=1): \"\"\" Reads files for one", "= 'KGEO_b4_553', BRDF_Albedo_Parameter3_Band5 = 'KGEO_b5_1241', BRDF_Albedo_Parameter3_Band6 = 'KGEO_b6_1629', BRDF_Albedo_Parameter3_Band7 = 'KGEO_b7_2114', ) #...........................................................................", "Lat0 = -90 + dLat self.dx = (0.5+(lon-Lon0)/dLon).astype(int) self.dy = (0.5+(lat-Lat0)/dLat).astype(int) if self.verb:", "return the # dx, dy on the grid # ------------------------------------- self.nobs = len(lon)", "* v + a['add_offset'] if self.verb: print array(self.dx), BRDF.shape, BRDF[self.SDS.index(sds),:], v.shape v =", "BRDF_Albedo_Parameter2_Band1 = 'KVOL_b1_645', BRDF_Albedo_Parameter2_Band2 = 'KVOL_b2_856', BRDF_Albedo_Parameter2_Band3 = 'KVOL_b3_465', BRDF_Albedo_Parameter2_Band4 = 'KVOL_b4_553', BRDF_Albedo_Parameter2_Band5", "print \"[] Working on \"+fn hfile = SD(fn) except HDF4Error: if self.verb >", "BRDF_Albedo_Parameter1_Band1 = 'KISO_b1_645', BRDF_Albedo_Parameter1_Band2 = 'KISO_b2_856', BRDF_Albedo_Parameter1_Band3 = 'KISO_b3_465', BRDF_Albedo_Parameter1_Band4 = 'KISO_b4_553', BRDF_Albedo_Parameter1_Band5", "( LAND = ('BRDF_Albedo_Parameter1_Band1','BRDF_Albedo_Parameter1_Band2', 'BRDF_Albedo_Parameter1_Band3','BRDF_Albedo_Parameter1_Band4', 'BRDF_Albedo_Parameter1_Band5','BRDF_Albedo_Parameter1_Band6', 'BRDF_Albedo_Parameter1_Band7', 'BRDF_Albedo_Parameter2_Band1','BRDF_Albedo_Parameter2_Band2', 'BRDF_Albedo_Parameter2_Band3','BRDF_Albedo_Parameter2_Band4', 'BRDF_Albedo_Parameter2_Band5','BRDF_Albedo_Parameter2_Band6', 'BRDF_Albedo_Parameter2_Band7', 'BRDF_Albedo_Parameter3_Band1','BRDF_Albedo_Parameter3_Band2', 'BRDF_Albedo_Parameter3_Band3','BRDF_Albedo_Parameter3_Band4',", "for a given date #----------------------------------- 
self.verb = Verb self.SDS = SDS['LAND'] #self.Tfiles =" ]
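A minimal, illustrative sketch (not part of the n-gram data above, assuming only NumPy): the mcd43c.py fragments in the preceding list repeatedly show two pieces of arithmetic, mapping a lon/lat pair onto the 0.05-degree MCD43C climate-modeling grid and rescaling a raw HDF SDS with its scale_factor/add_offset attributes. The standalone helpers below, with the hypothetical names cmg_indices and apply_scale, reproduce just that arithmetic so it can be checked outside the pyhdf-based class; the HDF file access itself is deliberately omitted.

    import numpy as np

    def cmg_indices(lon, lat, dlon=0.05, dlat=0.05):
        """Column/row indices (dx, dy) on the 0.05-degree grid, computed as in the fragments above."""
        lon = np.asarray(lon, dtype=float)
        lat = np.asarray(lat, dtype=float)
        lon0 = -180.0 - dlon      # grid origin convention used by the fragments
        lat0 = -90.0 + dlat
        dx = (0.5 + (lon - lon0) / dlon).astype(int)
        dy = (0.5 + (lat - lat0) / dlat).astype(int)
        return dx, dy

    def apply_scale(values, scale_factor=1.0, add_offset=0.0):
        """Rescale raw SDS counts when the HDF attributes differ from (1.0, 0.0)."""
        values = np.asarray(values, dtype=float)
        if scale_factor != 1.0 or add_offset != 0.0:
            values = scale_factor * values + add_offset
        return values

    if __name__ == "__main__":
        # Sample points similar to those in the fragments' __main__ block.
        dx, dy = cmg_indices([-2.0, -120.0, 15.2], [88.0, 40.0, -20.0])
        print(dx, dy)

The index formula mirrors the fragments' convention of shifting the grid origin by one cell (Lon0 = -180 - dLon, Lat0 = -90 + dLat) and rounding with a 0.5 offset before the integer cast; the sketch makes no claim beyond reproducing that arithmetic.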
[ "backend does not support TimeDistributed and RNN yet') def test_TimeDistributed_trainable(): # test layers", "3 units = 3 X = [np.random.rand(samples, timesteps, dim)] if merge_mode == 'sum':", "s_bk_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))", "Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np,", "states [constant] = constants h_input = K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const", "h_input + h_state + h_const return output, [output] def get_config(self): config = {'units':", "6)), epochs=1, batch_size=10) # compare to not using batch_input_shape test_input = np.random.randint(5, size=(10,", "64)) ) # Test basic case serialization. x_np = np.random.random((6, 5, 5)) s_fw_np", "= f_forward(X) y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0])) assert len(y_merged) == len(y_expected)", "get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items()))", "np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10,", "loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic", "= layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights)", "# test with Sequential model model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps,", "1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10, 2)))", "len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.updates) == 2 assert len(layer.trainable_weights) ==", "model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop',", "# basic case inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged", "keras import layers from keras.models import Sequential, Model, model_from_json from keras import backend", "= model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects):", "model.get_config() model = model_from_json(model.to_json()) model.summary() # test stacked bidirectional layers model = Sequential()", "than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel =", "shape') [input_shape, constant_shape] = input_shape # will (and should) raise if more than", "wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c],", "for i in range(3): assert np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert mask_outputs[-1] is None # final", "self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape,", "c = 
Input((3,)) s_for = Input((32,)) s_bac = Input((32,)) cell = RNNCellWithConstants(32) custom_objects", "does not support TimeDistributed and RNN yet') def test_TimeDistributed_with_masking_layer(): # test with Masking", "K.backend() == 'mxnet'), reason='Unknown timestamps for RNN not supported in CNTK and MXNet.')", "== 'cntk'), reason='Unknown timestamps not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with", "2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) == 6 layer.trainable =", "model.fit(x, y, epochs=1, batch_size=1) # test config model.get_config() model = model_from_json(model.to_json()) model.summary() #", "= states [constant] = constants h_input = K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel)", "outputs) inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs =", "y_np_2_different_s = model.predict([x_np, s_fw_np + 10., s_bk_np + 10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np,", "y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test layers that need learning_phase to be set", "2 assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x = Input(shape=(3, 2)) layer =", "= lambda y, y_rev: (y + y_rev) / 2 elif merge_mode == 'concat':", "= layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for, s_bac], constants=c)", "shape so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False))", "s_fw_np, s_bk_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer =", "4 assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x =", "timesteps = 2 output_dim = 2 dropout_rate = 0.2 for mode in ['sum',", "# Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10, 2))) #", "def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples = 2 dim = 5 timesteps =", "wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0 assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0", "2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert len(layer.losses) ==", "y_rev) / 2 elif merge_mode == 'concat': merge_func = lambda y, y_rev: np.concatenate((y,", "assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates():", "model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)),", "reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Embedding model =", "10, 2))) # Assert that mean and variance changed. 
assert not np.array_equal(td.get_weights()[2], np.array([0,", "y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn, state_inner in zip(y_merged, y_forward +", "shape and Embeddings with mask_zero model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None)))", "4), dtype='int32') test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6),", "y) y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet',", "= Input((32,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer =", "# first RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second RNN layer ref_mask_val", "timesteps, dim)) target_dim = 2 * output_dim if mode == 'concat' else output_dim", "outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with", "Bidirectional and stateful inputs = Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs)", "= 2 output_dim = 2 dropout_rate = 0.2 for mode in ['sum', 'concat']:", "Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse',", "mask_outputs[-1] is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not", "= Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6,", "= 5 timesteps = 3 units = 3 X = [np.random.rand(samples, timesteps, dim)]", "does not support custom RNN cell yet') def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self,", "dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with", "Input((timesteps, dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing", "{'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model =", "epochs=1, batch_size=10) # compare to not using batch_input_shape test_input = np.random.randint(5, size=(10, 3,", "state_birnn, state_inner in zip(y_merged, y_forward + y_backward): assert_allclose(state_birnn, state_inner, atol=1e-5) @pytest.mark.skipif(K.backend() == 'theano'", "not supported in CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape", "= Input((3,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer =", "np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has one mapping", "batch_size=1) # test config model.get_config() model = model_from_json(model.to_json()) model.summary() # test stacked bidirectional", "model.predict([x_np, s_fw_np, s_bk_np, 
c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test layers that", "2)) y = wrappers.TimeDistributed(model)(x) outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)),", "that state is used y_np_2_different_s = model.predict([x_np, s_fw_np + 10., s_bk_np + 10.,", "func = K.function([model.input], mask_outputs) mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1],", "4, 3)), np.random.random((1, 2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary() # test", "= wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, constants=c) model = Model([x, c], y) model.set_weights(weights) y_np_2", "mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input], mask_outputs) mask_outputs_val =", "c = Input((3,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer", "y_rev), axis=-1) else: merge_func = lambda y, y_rev: [y, y_rev] # basic case", "RNN yet') def test_TimeDistributed_with_masking_layer(): # test with Masking layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,),", "= Model([input1, input2], output) assert len(model.layers) == 4 assert isinstance(model.layers[-1].input, list) inputs =", "test valid usage: passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1,", "model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses)", "= np.random.random((samples, target_dim)) inputs = Input((None, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs)", "# compare to not using batch_input_shape test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32')", "= layers.SimpleRNN samples = 2 dim = 2 timesteps = 2 output_dim =", "np.any(ref_mask_val_1, axis=-1) # second RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i", "Embeddings with mask_zero model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) # the", "input_shape # will (and should) raise if more than one constant passed self.input_kernel", "3, 4)), np.random.random((10, 3, 2)), epochs=1, batch_size=10) # test config model.get_config() # test", "# test config model.get_config() # test when specifying a batch_input_shape test_input = np.random.random((1,", "= 2 dim = 5 timesteps = 3 units = 3 X =", "Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps", "mask_outputs) mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def", "= model_input > 0 # embedding layer ref_mask_val_1 = ref_mask_val_0 # first RNN", "pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs with 
CustomObjectScope(custom_objects): layer =", "td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not", "usage: passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output)", "Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10, 3, 4),", "assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.updates) == 2 assert len(layer.trainable_weights)", "= layer(x, constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5,", "assert len(model.losses) == 1 model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu'))", "model.predict([x_np, s_fw_np, s_bk_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer", "# test passing invalid initial_state: passing a tensor input2 = Input((timesteps, dim)) with", "= wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) == 6 layer.trainable = False assert", "False assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.trainable_weights) == 6 def", "len(y_merged) == len(y_expected) for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) #", "output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing a list output =", "= Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse')", "1 assert len(model.losses) == 1 model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4)))", "3, 4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10) # compare to not", "wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model = Model([x, c], y) model.set_weights(weights) y_np_3 =", "s_bk_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # verify that state is used y_np_2_different_s =", "# embedding layer ref_mask_val_1 = ref_mask_val_0 # first RNN layer ref_mask_val_2 = np.any(ref_mask_val_1,", "test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4),", "test with BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop',", "model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32') for i in range(4): model_input[i,", "state of a BiRNN is the concatenation of the underlying RNNs y_merged =", "@pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom RNN cell yet') def", "assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has one mapping from inputs", "output) assert len(model.layers) == 4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim),", "y_forward + y_backward): 
assert_allclose(state_birnn, state_inner, atol=1e-5) @pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet',", "= [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input], mask_outputs) mask_outputs_val = func([model_input])", "model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # Bidirectional and", "layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model", "c]) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np,", "dropout=0.2, return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs)) assert all(x._uses_learning_phase for x in outputs) model", "axis=-1) # second RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in", "reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with", "atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom RNN cell yet')", "basic case serialization. x_np = np.random.random((6, 5, 5)) s_fw_np = np.random.random((6, 32)) s_bk_np", "4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs", "layer(x, constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)),", "serialization. 
x_np = np.random.random((6, 5, 5)) s_fw_np = np.random.random((6, 32)) s_bk_np = np.random.random((6,", "= wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y,", "model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert", "epochs=1, batch_size=1) # test config model.get_config() model = model_from_json(model.to_json()) model.summary() # test stacked", "c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom", "Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5,", "model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd') model.fit(x,", "model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): #", "model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None)) ==", "assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape, constant_shape] = input_shape", "y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test", "len(layer.trainable_weights) == 2 layer.trainable = False assert len(layer.updates) == 0 assert len(layer.trainable_weights) ==", "for i in range(4): model_input[i, i:, :] = 0. 
model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10,", "2)), epochs=1, batch_size=10) # test config model.get_config() # test when specifying a batch_input_shape", "merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # test config model.get_config()", "dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward = K.function([inputs],", "model_from_json(model.to_json()) model.summary() # test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3)))", "batch_size=10) # test config model.get_config() # test when specifying a batch_input_shape test_input =", "assert len(layer.updates) == 4 assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x)) == 2 def", "wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, constants=c) model = Model([x, c], y) model.set_weights(weights) y_np_2 =", "def test_Bidirectional(): rnn = layers.SimpleRNN samples = 2 dim = 2 timesteps =", "= wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model = Model([x, c], y) model.set_weights(weights) y_np_3", "3)), epochs=1, batch_size=10) # test with BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True),", "dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert all(not", "wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert all(not getattr(x,", "epochs=1, batch_size=10) # test config model.get_config() # test when specifying a batch_input_shape test_input", "mask_outputs = [model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1])", "backend does not support TimeDistributed and RNN yet') def test_TimeDistributed(): # first, test", "loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input,", "reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output,", "mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input], mask_outputs) mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0],", "y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0])) assert len(y_merged) == len(y_expected) + n_states", "[np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet", "y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def 
test_Bidirectional_trainable():", "assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(", "= model_from_json(model.to_json()) model.summary() # test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))", "6), input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05)", "4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10) # compare to not using", "assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed", "inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x))", "functional API with dynamic length rnn = layers.SimpleRNN samples = 2 dim =", "def test_Bidirectional_state_reuse(): rnn = layers.LSTM samples = 2 dim = 5 timesteps =", "= K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output = h_input + h_state +", "dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert all(not getattr(x, '_uses_learning_phase') for x", "len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x", "self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Test basic case.", "Input((None, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse',", "range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs =", "outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None])", "'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'),", "* output_dim if mode == 'concat' else output_dim y = np.random.random((samples, target_dim)) #", "= Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10,", "def test_TimeDistributed_learning_phase(): # test layers that need learning_phase to be set np.random.seed(1234) x", "that need learning_phase to be set x = Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization())", "Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) == 1", "x = Input((5, 5)) c = Input((3,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants':", "atol=1e-5) # test return_state inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode)", "2)), np.broadcast_to(np.array([0, 1]), (1, 10, 
import pytest
import numpy as np
import copy
from numpy.testing import assert_allclose

from keras.utils import CustomObjectScope
from keras.layers import wrappers, Input, Layer
from keras.layers import RNN
from keras import layers
from keras.models import Sequential, Model, model_from_json
from keras import backend as K
from keras.utils.generic_utils import object_list_uid, to_list


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
def test_TimeDistributed():
    # first, test with Dense layer
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)),
              epochs=1, batch_size=10)

    # test config
    model.get_config()

    # test when specifying a batch_input_shape
    test_input = np.random.random((1, 3, 4))
    test_output = model.predict(test_input)
    weights = model.layers[0].get_weights()

    reference = Sequential()
    reference.add(wrappers.TimeDistributed(layers.Dense(2),
                                           batch_input_shape=(1, 3, 4)))
    reference.add(layers.Activation('relu'))
    reference.compile(optimizer='rmsprop', loss='mse')
    reference.layers[0].set_weights(weights)

    reference_output = reference.predict(test_input)
    assert_allclose(test_output, reference_output, atol=1e-05)

    # test with Embedding
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Embedding(5, 6),
                                       batch_input_shape=(10, 3, 4),
                                       dtype='int32'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'),
              np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10)

    # compare to not using batch_input_shape
    test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32')
    test_output = model.predict(test_input)
    weights = model.layers[0].get_weights()

    reference = Sequential()
    reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6),
                                           input_shape=(3, 4), dtype='int32'))
    reference.compile(optimizer='rmsprop', loss='mse')
    reference.layers[0].set_weights(weights)

    reference_output = reference.predict(test_input)
    assert_allclose(test_output, reference_output, atol=1e-05)

    # test with Conv2D
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'),
                                       input_shape=(2, 4, 4, 3)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(np.random.random((1, 2, 4, 4, 3)),
                         np.random.random((1, 2, 4, 4, 5)))
    model = model_from_json(model.to_json())
    model.summary()

    # test stacked layers
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4)))
    model.add(wrappers.TimeDistributed(layers.Dense(3)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 3)),
              epochs=1, batch_size=10)

    # test wrapping Sequential model
    model = Sequential()
    model.add(layers.Dense(3, input_dim=2))
    outer_model = Sequential()
    outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2)))
    outer_model.compile(optimizer='rmsprop', loss='mse')
    outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)),
                    epochs=1, batch_size=10)

    # test with functional API
    x = Input(shape=(3, 2))
    y = wrappers.TimeDistributed(model)(x)
    outer_model = Model(x, y)
    outer_model.compile(optimizer='rmsprop', loss='mse')
    outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)),
                    epochs=1, batch_size=10)

    # test with BatchNormalization
    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.BatchNormalization(center=True, scale=True),
        name='bn', input_shape=(10, 2)))
    model.compile(optimizer='rmsprop', loss='mse')
    # Assert that mean and variance are 0 and 1.
    td = model.layers[0]
    assert np.array_equal(td.get_weights()[2], np.array([0, 0]))
    assert np.array_equal(td.get_weights()[3], np.array([1, 1]))
    # Train
    model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)),
                         np.broadcast_to(np.array([0, 1]), (1, 10, 2)))
    # Assert that mean and variance changed.
    assert not np.array_equal(td.get_weights()[2], np.array([0, 0]))
    assert not np.array_equal(td.get_weights()[3], np.array([1, 1]))
    # Verify input_map has one mapping from inputs to reshaped inputs.
    uid = object_list_uid(model.inputs)
    assert len(td._input_map.keys()) == 1
    assert uid in td._input_map
    assert K.int_shape(td._input_map[uid]) == (None, 2)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
@pytest.mark.skipif((K.backend() == 'cntk'),
                    reason='Flaky with CNTK backend')
def test_TimeDistributed_learning_phase():
    # test layers that need learning_phase to be set
    x = Input(shape=(3, 2))
    y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True)
    model = Model(x, y)
    y = model.predict(np.random.random((10, 3, 2)))
    assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
def test_TimeDistributed_trainable():
    # test layers that need learning_phase to be set
    x = Input(shape=(3, 2))
    layer = wrappers.TimeDistributed(layers.BatchNormalization())
    _ = layer(x)
    assert len(layer.updates) == 2
    assert len(layer.trainable_weights) == 2
    layer.trainable = False
    assert len(layer.updates) == 0
    assert len(layer.trainable_weights) == 0
    layer.trainable = True
    assert len(layer.updates) == 2
    assert len(layer.trainable_weights) == 2


@pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'),
                    reason='Unknown timestamps for RNN not supported in CNTK and MXNet.')
def test_TimeDistributed_with_masked_embedding_and_unspecified_shape():
    # test with unspecified shape and Embeddings with mask_zero
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True),
                                       input_shape=(None, None)))
    # the shape so far: (N, t_1, t_2, 6)
    model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True)))
    model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False)))
    model.add(layers.SimpleRNN(1, return_sequences=False))
    model.compile(optimizer='rmsprop', loss='mse')
    model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32')
    for i in range(4):
        model_input[i, i:, i:] = 0
    model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10)
    mask_outputs = [model.layers[0].compute_mask(model.input)]
    for layer in model.layers[1:]:
        mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1]))
    func = K.function([model.input], mask_outputs[:-1])
    mask_outputs_val = func([model_input])
    ref_mask_val_0 = model_input > 0         # embedding layer
    ref_mask_val_1 = ref_mask_val_0          # first RNN layer
    ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1)  # second RNN layer
    ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2]
    for i in range(3):
        assert np.array_equal(mask_outputs_val[i], ref_mask_val[i])
    assert mask_outputs[-1] is None  # final layer


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
def test_TimeDistributed_with_masking_layer():
    # test with Masking layer
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,),
                                       input_shape=(None, 4)))
    model.add(wrappers.TimeDistributed(layers.Dense(5)))
    model.compile(optimizer='rmsprop', loss='mse')
    model_input = np.random.randint(low=1, high=5, size=(10, 3, 4))
    for i in range(4):
        model_input[i, i:, :] = 0.
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6)
    mask_outputs = [model.layers[0].compute_mask(model.input)]
    mask_outputs += [model.layers[1].compute_mask(model.layers[1].input,
                                                  mask_outputs[-1])]
    func = K.function([model.input], mask_outputs)
    mask_outputs_val = func([model_input])
    assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1))
    assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1))


def test_regularizers():
    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    assert len(model.layers[0].layer.losses) == 1
    assert len(model.layers[0].losses) == 1
    assert len(model.layers[0].get_losses_for(None)) == 1
    assert len(model.losses) == 1

    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    assert len(model.losses) == 1


def test_Bidirectional():
    rnn = layers.SimpleRNN
    samples = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    dropout_rate = 0.2
    for mode in ['sum', 'concat']:
        x = np.random.random((samples, timesteps, dim))
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((samples, target_dim))

        # test with Sequential model
        model = Sequential()
        model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode,
                                         input_shape=(timesteps, dim)))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # test config
        model.get_config()
        model = model_from_json(model.to_json())
        model.summary()

        # test stacked bidirectional layers
        model = Sequential()
        model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True),
                                         merge_mode=mode,
                                         input_shape=(timesteps, dim)))
        model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # test with functional API
        inputs = Input((timesteps, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # Bidirectional and stateful
        inputs = Input(batch_shape=(1, timesteps, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)


@pytest.mark.skipif((K.backend() == 'cntk'),
                    reason='Unknown timestamps not supported in CNTK.')
def test_Bidirectional_dynamic_timesteps():
    # test with functional API with dynamic length
    rnn = layers.SimpleRNN
    samples = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    dropout_rate = 0.2
    for mode in ['sum', 'concat']:
        x = np.random.random((samples, timesteps, dim))
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((samples, target_dim))
        inputs = Input((None, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)


@pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None])
def test_Bidirectional_merged_value(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    if merge_mode == 'sum':
        merge_func = lambda y, y_rev: y + y_rev
    elif merge_mode == 'mul':
        merge_func = lambda y, y_rev: y * y_rev
    elif merge_mode == 'ave':
        merge_func = lambda y, y_rev: (y + y_rev) / 2
    elif merge_mode == 'concat':
        merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1)
    else:
        merge_func = lambda y, y_rev: [y, y_rev]

    # basic case
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_sequences=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], to_list(layer(inputs)))
    f_forward = K.function([inputs], [layer.forward_layer.call(inputs)])
    f_backward = K.function([inputs],
                            [K.reverse(layer.backward_layer.call(inputs), 1)])

    y_merged = f_merged(X)
    y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0]))
    assert len(y_merged) == len(y_expected)
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test return_state
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_state=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], layer(inputs))
    f_forward = K.function([inputs], layer.forward_layer.call(inputs))
    f_backward = K.function([inputs], layer.backward_layer.call(inputs))
    n_states = len(layer.layer.states)

    y_merged = f_merged(X)
    y_forward = f_forward(X)
    y_backward = f_backward(X)
    y_expected = to_list(merge_func(y_forward[0], y_backward[0]))
    assert len(y_merged) == len(y_expected) + n_states * 2
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test if the state of a BiRNN is the concatenation of the underlying RNNs
    y_merged = y_merged[-n_states * 2:]
    y_forward = y_forward[-n_states:]
    y_backward = y_backward[-n_states:]
    for state_birnn, state_inner in zip(y_merged, y_forward + y_backward):
        assert_allclose(state_birnn, state_inner, atol=1e-5)


@pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet',
                    reason='Not supported.')
@pytest.mark.parametrize('merge_mode', ['sum', 'concat', None])
def test_Bidirectional_dropout(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2,
                                         recurrent_dropout=0.2),
                                     merge_mode=merge_mode)
    outputs = to_list(wrapped(inputs, training=True))
    assert all(not getattr(x, '_uses_learning_phase') for x in outputs)

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True),
                                     merge_mode=merge_mode)
    outputs = to_list(wrapped(inputs))
    assert all(x._uses_learning_phase for x in outputs)

    model = Model(inputs, outputs)
    assert model.uses_learning_phase
    y1 = to_list(model.predict(X))
    y2 = to_list(model.predict(X))
    for x1, x2 in zip(y1, y2):
        assert_allclose(x1, x2, atol=1e-5)


def test_Bidirectional_state_reuse():
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3

    input1 = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_state=True,
                                       return_sequences=True))
    state = layer(input1)[1:]

    # test passing invalid initial_state: passing a tensor
    input2 = Input((timesteps, dim))
    with pytest.raises(ValueError):
        output = wrappers.Bidirectional(rnn(units))(input2,
                                                    initial_state=state[0])

    # test valid usage: passing a list
    output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state)
    model = Model([input1, input2], output)
    assert len(model.layers) == 4
    assert isinstance(model.layers[-1].input, list)
    inputs = [np.random.rand(samples, timesteps, dim),
              np.random.rand(samples, timesteps, dim)]
    outputs = model.predict(inputs)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support custom RNN cell yet')
def test_Bidirectional_with_constants():
    class RNNCellWithConstants(Layer):

        def __init__(self, units, **kwargs):
            self.units = units
            self.state_size = units
            super(RNNCellWithConstants, self).__init__(**kwargs)

        def build(self, input_shape):
            if not isinstance(input_shape, list):
                raise TypeError('expects constants shape')
            [input_shape, constant_shape] = input_shape
            # will (and should) raise if more than one constant passed

            self.input_kernel = self.add_weight(
                shape=(input_shape[-1], self.units),
                initializer='uniform',
                name='kernel')
            self.recurrent_kernel = self.add_weight(
                shape=(self.units, self.units),
                initializer='uniform',
                name='recurrent_kernel')
            self.constant_kernel = self.add_weight(
                shape=(constant_shape[-1], self.units),
                initializer='uniform',
                name='constant_kernel')
            self.built = True

        def call(self, inputs, states, constants):
            [prev_output] = states
            [constant] = constants
            h_input = K.dot(inputs, self.input_kernel)
            h_state = K.dot(prev_output, self.recurrent_kernel)
            h_const = K.dot(constant, self.constant_kernel)
            output = h_input + h_state + h_const
            return output, [output]

        def get_config(self):
            config = {'units': self.units}
            base_config = super(RNNCellWithConstants, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))

    # Test basic case.
    x = Input((5, 5))
    c = Input((3,))
    cell = RNNCellWithConstants(32)
    custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional(RNN(cell))
    y = layer(x, constants=c)
    model = Model([x, c], y)
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(
        [np.zeros((6, 5, 5)), np.zeros((6, 3))],
        np.zeros((6, 64))
    )

    # Test basic case serialization.
    x_np = np.random.random((6, 5, 5))
    c_np = np.random.random((6, 3))
    y_np = model.predict([x_np, c_np])
    weights = model.get_weights()
    config = layer.get_config()
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer(x, constants=c)
    model = Model([x, c], y)
    model.set_weights(weights)
    y_np_2 = model.predict([x_np, c_np])
    assert_allclose(y_np, y_np_2, atol=1e-4)

    # test flat list inputs
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer([x, c])
    model = Model([x, c], y)
    model.set_weights(weights)
    y_np_3 = model.predict([x_np, c_np])
    assert_allclose(y_np, y_np_3, atol=1e-4)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support custom RNN cell yet')
def test_Bidirectional_with_constants_layer_passing_initial_state():
    class RNNCellWithConstants(Layer):

        def __init__(self, units, **kwargs):
            self.units = units
            self.state_size = units
            super(RNNCellWithConstants, self).__init__(**kwargs)

        def build(self, input_shape):
            if not isinstance(input_shape, list):
                raise TypeError('expects constants shape')
            [input_shape, constant_shape] = input_shape
            # will (and should) raise if more than one constant passed

            self.input_kernel = self.add_weight(
                shape=(input_shape[-1], self.units),
                initializer='uniform',
                name='kernel')
            self.recurrent_kernel = self.add_weight(
                shape=(self.units, self.units),
                initializer='uniform',
                name='recurrent_kernel')
            self.constant_kernel = self.add_weight(
                shape=(constant_shape[-1], self.units),
                initializer='uniform',
                name='constant_kernel')
            self.built = True

        def call(self, inputs, states, constants):
            [prev_output] = states
            [constant] = constants
            h_input = K.dot(inputs, self.input_kernel)
            h_state = K.dot(prev_output, self.recurrent_kernel)
            h_const = K.dot(constant, self.constant_kernel)
            output = h_input + h_state + h_const
            return output, [output]

        def get_config(self):
            config = {'units': self.units}
            base_config = super(RNNCellWithConstants, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))

    # Test basic case.
    x = Input((5, 5))
    c = Input((3,))
    s_for = Input((32,))
    s_bac = Input((32,))
    cell = RNNCellWithConstants(32)
    custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional(RNN(cell))
    y = layer(x, initial_state=[s_for, s_bac], constants=c)
    model = Model([x, s_for, s_bac, c], y)
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(
        [np.zeros((6, 5, 5)), np.zeros((6, 32)),
         np.zeros((6, 32)), np.zeros((6, 3))],
        np.zeros((6, 64))
    )

    # Test basic case serialization.
    x_np = np.random.random((6, 5, 5))
    s_fw_np = np.random.random((6, 32))
    s_bk_np = np.random.random((6, 32))
    c_np = np.random.random((6, 3))
    y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    weights = model.get_weights()
    config = layer.get_config()
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer(x, initial_state=[s_for, s_bac], constants=c)
    model = Model([x, s_for, s_bac, c], y)
    model.set_weights(weights)
    y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    assert_allclose(y_np, y_np_2, atol=1e-4)

    # verify that state is used
    y_np_2_different_s = model.predict([x_np, s_fw_np + 10.,
                                        s_bk_np + 10., c_np])
    with pytest.raises(AssertionError):
        assert_allclose(y_np, y_np_2_different_s, atol=1e-4)

    # test flat list inputs
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer([x, s_for, s_bac, c])
    model = Model([x, s_for, s_bac, c], y)
    model.set_weights(weights)
    y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    assert_allclose(y_np, y_np_3, atol=1e-4)


def test_Bidirectional_trainable():
    # test layers that need learning_phase to be set
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(layers.SimpleRNN(3))
    _ = layer(x)
    assert len(layer.trainable_weights) == 6
    layer.trainable = False
    assert len(layer.trainable_weights) == 0
    layer.trainable = True
    assert len(layer.trainable_weights) == 6


def test_Bidirectional_updates():
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(layers.SimpleRNN(3))
    assert len(layer.updates) == 0
    assert len(layer.get_updates_for(None)) == 0
    assert len(layer.get_updates_for(x)) == 0
    layer.forward_layer.add_update(0, inputs=x)
    layer.forward_layer.add_update(1, inputs=None)
    layer.backward_layer.add_update(0, inputs=x)
    layer.backward_layer.add_update(1, inputs=None)
    assert len(layer.updates) == 4
    assert len(layer.get_updates_for(None)) == 2
    assert len(layer.get_updates_for(x)) == 2


def test_Bidirectional_losses():
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(
        layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1'))
    _ = layer(x)
    assert len(layer.losses) == 4
    assert len(layer.get_losses_for(None)) == 4
    assert len(layer.get_losses_for(x)) == 0
    layer.forward_layer.add_loss(0, inputs=x)
    layer.forward_layer.add_loss(1, inputs=None)
    layer.backward_layer.add_loss(0, inputs=x)
    layer.backward_layer.add_loss(1, inputs=None)
    assert len(layer.losses) == 8
    assert len(layer.get_losses_for(None)) == 6
    assert len(layer.get_losses_for(x)) == 2


if __name__ == '__main__':
    pytest.main([__file__])
len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x)", "ref_mask_val_1, ref_mask_val_2] for i in range(3): assert np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert mask_outputs[-1] is None", "== 4 assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1,", "3, 4)) test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1,", "that mean and variance are 0 and 1. td = model.layers[0] assert np.array_equal(td.get_weights()[2],", "timesteps = 3 units = 3 input1 = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units,", "units, **kwargs): self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape):", "constants shape') [input_shape, constant_shape] = input_shape # will (and should) raise if more", "c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects):", "output = h_input + h_state + h_const return output, [output] def get_config(self): config", "6 def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) ==", "dtype='int32') test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3,", "y, y_rev: y * y_rev elif merge_mode == 'ave': merge_func = lambda y,", "model.summary() # test stacked bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps,", "and variance changed. 
@pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'),
                    reason='Unknown timestamps for RNN not supported in CNTK and MXNet.')
def test_TimeDistributed_with_masked_embedding_and_unspecified_shape():
    # test with unspecified shape and Embeddings with mask_zero
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True),
                                       input_shape=(None, None)))
    # the shape so far: (N, t_1, t_2, 6)
    model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True)))
    model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False)))
    model.add(layers.SimpleRNN(1, return_sequences=False))
    model.compile(optimizer='rmsprop', loss='mse')
    model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32')
    for i in range(4):
        model_input[i, i:, i:] = 0
    model.fit(model_input,
              np.random.random((10, 1)), epochs=1, batch_size=10)
    mask_outputs = [model.layers[0].compute_mask(model.input)]
    for layer in model.layers[1:]:
        mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1]))
    func = K.function([model.input], mask_outputs[:-1])
    mask_outputs_val = func([model_input])
    ref_mask_val_0 = model_input > 0          # embedding layer
    ref_mask_val_1 = ref_mask_val_0           # first RNN layer
    ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1)  # second RNN layer
    ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2]
    for i in range(3):
        assert np.array_equal(mask_outputs_val[i], ref_mask_val[i])
    assert mask_outputs[-1] is None  # final layer


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support TimeDistributed and RNN yet')
def test_TimeDistributed_with_masking_layer():
    # test with Masking layer
    model = Sequential()
    model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,),
                                       input_shape=(None, 4)))
    model.add(wrappers.TimeDistributed(layers.Dense(5)))
    model.compile(optimizer='rmsprop', loss='mse')
    model_input = np.random.randint(low=1, high=5, size=(10, 3, 4))
    for i in range(4):
        model_input[i, i:, :] = 0.
    model.compile(optimizer='rmsprop', loss='mse')
    model.fit(model_input,
              np.random.random((10, 3, 5)), epochs=1, batch_size=6)
    mask_outputs = [model.layers[0].compute_mask(model.input)]
    mask_outputs += [model.layers[1].compute_mask(model.layers[1].input,
                                                  mask_outputs[-1])]
    func = K.function([model.input], mask_outputs)
    mask_outputs_val = func([model_input])
    assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1))
    assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1))


def test_regularizers():
    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    assert len(model.layers[0].layer.losses) == 1
    assert len(model.layers[0].losses) == 1
    assert len(model.layers[0].get_losses_for(None)) == 1
    assert len(model.losses) == 1

    model = Sequential()
    model.add(wrappers.TimeDistributed(
        layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4)))
    model.add(layers.Activation('relu'))
    model.compile(optimizer='rmsprop', loss='mse')
    assert len(model.losses) == 1

def test_Bidirectional():
    rnn = layers.SimpleRNN
    samples = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    dropout_rate = 0.2
    for mode in ['sum', 'concat']:
        x = np.random.random((samples, timesteps, dim))
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((samples, target_dim))

        # test with Sequential model
        model = Sequential()
        model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode,
                                         input_shape=(timesteps, dim)))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # test config
        model.get_config()
        model = model_from_json(model.to_json())
        model.summary()

        # test stacked bidirectional layers
        model = Sequential()
        model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True),
                                         merge_mode=mode,
                                         input_shape=(timesteps, dim)))
        model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode))
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # test with functional API
        inputs = Input((timesteps, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

        # Bidirectional and stateful
        inputs = Input(batch_shape=(1, timesteps, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)


@pytest.mark.skipif((K.backend() == 'cntk'),
                    reason='Unknown timestamps not supported in CNTK.')
def test_Bidirectional_dynamic_timesteps():
    # test with functional API with dynamic length
    rnn = layers.SimpleRNN
    samples = 2
    dim = 2
    timesteps = 2
    output_dim = 2
    dropout_rate = 0.2
    for mode in ['sum', 'concat']:
        x = np.random.random((samples, timesteps, dim))
        target_dim = 2 * output_dim if mode == 'concat' else output_dim
        y = np.random.random((samples, target_dim))

        inputs = Input((None, dim))
        outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,
                                             recurrent_dropout=dropout_rate),
                                         merge_mode=mode)(inputs)
        model = Model(inputs, outputs)
        model.compile(loss='mse', optimizer='sgd')
        model.fit(x, y, epochs=1, batch_size=1)

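# A quick reference for the merge modes checked by the parametrized test below:
# with forward output y and time-reversed backward output y_rev,
#   'sum'    -> y + y_rev
#   'mul'    -> y * y_rev
#   'ave'    -> (y + y_rev) / 2
#   'concat' -> np.concatenate((y, y_rev), axis=-1)
#   None     -> the unmerged pair [y, y_rev]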
@pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None])
def test_Bidirectional_merged_value(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    if merge_mode == 'sum':
        merge_func = lambda y, y_rev: y + y_rev
    elif merge_mode == 'mul':
        merge_func = lambda y, y_rev: y * y_rev
    elif merge_mode == 'ave':
        merge_func = lambda y, y_rev: (y + y_rev) / 2
    elif merge_mode == 'concat':
        merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1)
    else:
        merge_func = lambda y, y_rev: [y, y_rev]

    # basic case
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_sequences=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], to_list(layer(inputs)))
    f_forward = K.function([inputs], [layer.forward_layer.call(inputs)])
    f_backward = K.function([inputs],
                            [K.reverse(layer.backward_layer.call(inputs), 1)])

    y_merged = f_merged(X)
    y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0]))
    assert len(y_merged) == len(y_expected)
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test return_state
    inputs = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_state=True),
                                   merge_mode=merge_mode)
    f_merged = K.function([inputs], layer(inputs))
    f_forward = K.function([inputs], layer.forward_layer.call(inputs))
    f_backward = K.function([inputs], layer.backward_layer.call(inputs))
    n_states = len(layer.layer.states)

    y_merged = f_merged(X)
    y_forward = f_forward(X)
    y_backward = f_backward(X)
    y_expected = to_list(merge_func(y_forward[0], y_backward[0]))
    assert len(y_merged) == len(y_expected) + n_states * 2
    for x1, x2 in zip(y_merged, y_expected):
        assert_allclose(x1, x2, atol=1e-5)

    # test if the state of a BiRNN is the concatenation of the underlying RNNs
    y_merged = y_merged[-n_states * 2:]
    y_forward = y_forward[-n_states:]
    y_backward = y_backward[-n_states:]
    for state_birnn, state_inner in zip(y_merged, y_forward + y_backward):
        assert_allclose(state_birnn, state_inner, atol=1e-5)


@pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet',
                    reason='Not supported.')
@pytest.mark.parametrize('merge_mode', ['sum', 'concat', None])
def test_Bidirectional_dropout(merge_mode):
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3
    X = [np.random.rand(samples, timesteps, dim)]

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2),
                                     merge_mode=merge_mode)
    outputs = to_list(wrapped(inputs, training=True))
    assert all(not getattr(x, '_uses_learning_phase') for x in outputs)

    inputs = Input((timesteps, dim))
    wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True),
                                     merge_mode=merge_mode)
    outputs = to_list(wrapped(inputs))
    assert all(x._uses_learning_phase for x in outputs)

    model = Model(inputs, outputs)
    assert model.uses_learning_phase
    y1 = to_list(model.predict(X))
    y2 = to_list(model.predict(X))
    for x1, x2 in zip(y1, y2):
        assert_allclose(x1, x2, atol=1e-5)


def test_Bidirectional_state_reuse():
    rnn = layers.LSTM
    samples = 2
    dim = 5
    timesteps = 3
    units = 3

    input1 = Input((timesteps, dim))
    layer = wrappers.Bidirectional(rnn(units, return_state=True,
                                       return_sequences=True))
    state = layer(input1)[1:]

    # test passing invalid initial_state: passing a tensor
    input2 = Input((timesteps, dim))
    with pytest.raises(ValueError):
        output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0])

    # test valid usage: passing a list
    output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state)
    model = Model([input1, input2], output)
    assert len(model.layers) == 4
    assert isinstance(model.layers[-1].input, list)
    inputs = [np.random.rand(samples, timesteps, dim),
              np.random.rand(samples, timesteps, dim)]
    outputs = model.predict(inputs)

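# The two tests below define a minimal custom cell (RNNCellWithConstants) whose
# step output is K.dot(inputs, input_kernel) + K.dot(prev_output, recurrent_kernel)
# + K.dot(constant, constant_kernel).  They check that Bidirectional forwards the
# `constants` keyword (and, in the second test, a per-direction `initial_state`)
# to both the forward and the backward copy of the wrapped RNN, and that the
# wrapper round-trips through get_config/from_config with those extra inputs.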
@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support custom RNN cell yet')
def test_Bidirectional_with_constants():
    class RNNCellWithConstants(Layer):

        def __init__(self, units, **kwargs):
            self.units = units
            self.state_size = units
            super(RNNCellWithConstants, self).__init__(**kwargs)

        def build(self, input_shape):
            if not isinstance(input_shape, list):
                raise TypeError('expects constants shape')
            [input_shape, constant_shape] = input_shape
            # will (and should) raise if more than one constant passed
            self.input_kernel = self.add_weight(
                shape=(input_shape[-1], self.units),
                initializer='uniform',
                name='kernel')
            self.recurrent_kernel = self.add_weight(
                shape=(self.units, self.units),
                initializer='uniform',
                name='recurrent_kernel')
            self.constant_kernel = self.add_weight(
                shape=(constant_shape[-1], self.units),
                initializer='uniform',
                name='constant_kernel')
            self.built = True

        def call(self, inputs, states, constants):
            [prev_output] = states
            [constant] = constants
            h_input = K.dot(inputs, self.input_kernel)
            h_state = K.dot(prev_output, self.recurrent_kernel)
            h_const = K.dot(constant, self.constant_kernel)
            output = h_input + h_state + h_const
            return output, [output]

        def get_config(self):
            config = {'units': self.units}
            base_config = super(RNNCellWithConstants, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))

    # Test basic case.
    x = Input((5, 5))
    c = Input((3,))
    cell = RNNCellWithConstants(32)
    custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional(RNN(cell))
    y = layer(x, constants=c)
    model = Model([x, c], y)
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(
        [np.zeros((6, 5, 5)), np.zeros((6, 3))],
        np.zeros((6, 64))
    )

    # Test basic case serialization.
    x_np = np.random.random((6, 5, 5))
    c_np = np.random.random((6, 3))
    y_np = model.predict([x_np, c_np])
    weights = model.get_weights()
    config = layer.get_config()
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer(x, constants=c)
    model = Model([x, c], y)
    model.set_weights(weights)
    y_np_2 = model.predict([x_np, c_np])
    assert_allclose(y_np, y_np_2, atol=1e-4)

    # test flat list inputs
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer([x, c])
    model = Model([x, c], y)
    model.set_weights(weights)
    y_np_3 = model.predict([x_np, c_np])
    assert_allclose(y_np, y_np_3, atol=1e-4)


@pytest.mark.skipif(K.backend() == 'mxnet',
                    reason='MXNet backend does not support custom RNN cell yet')
def test_Bidirectional_with_constants_layer_passing_initial_state():
    class RNNCellWithConstants(Layer):

        def __init__(self, units, **kwargs):
            self.units = units
            self.state_size = units
            super(RNNCellWithConstants, self).__init__(**kwargs)

        def build(self, input_shape):
            if not isinstance(input_shape, list):
                raise TypeError('expects constants shape')
            [input_shape, constant_shape] = input_shape
            # will (and should) raise if more than one constant passed
            self.input_kernel = self.add_weight(
                shape=(input_shape[-1], self.units),
                initializer='uniform',
                name='kernel')
            self.recurrent_kernel = self.add_weight(
                shape=(self.units, self.units),
                initializer='uniform',
                name='recurrent_kernel')
            self.constant_kernel = self.add_weight(
                shape=(constant_shape[-1], self.units),
                initializer='uniform',
                name='constant_kernel')
            self.built = True

        def call(self, inputs, states, constants):
            [prev_output] = states
            [constant] = constants
            h_input = K.dot(inputs, self.input_kernel)
            h_state = K.dot(prev_output, self.recurrent_kernel)
            h_const = K.dot(constant, self.constant_kernel)
            output = h_input + h_state + h_const
            return output, [output]

        def get_config(self):
            config = {'units': self.units}
            base_config = super(RNNCellWithConstants, self).get_config()
            return dict(list(base_config.items()) + list(config.items()))

    # Test basic case.
    x = Input((5, 5))
    c = Input((3,))
    s_for = Input((32,))
    s_bac = Input((32,))
    cell = RNNCellWithConstants(32)
    custom_objects = {'RNNCellWithConstants': RNNCellWithConstants}
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional(RNN(cell))
    y = layer(x, initial_state=[s_for, s_bac], constants=c)
    model = Model([x, s_for, s_bac, c], y)
    model.compile(optimizer='rmsprop', loss='mse')
    model.train_on_batch(
        [np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))],
        np.zeros((6, 64))
    )

    # Test basic case serialization.
    x_np = np.random.random((6, 5, 5))
    s_fw_np = np.random.random((6, 32))
    s_bk_np = np.random.random((6, 32))
    c_np = np.random.random((6, 3))
    y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    weights = model.get_weights()
    config = layer.get_config()
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer(x, initial_state=[s_for, s_bac], constants=c)
    model = Model([x, s_for, s_bac, c], y)
    model.set_weights(weights)
    y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    assert_allclose(y_np, y_np_2, atol=1e-4)

    # verify that state is used
    y_np_2_different_s = model.predict([x_np, s_fw_np + 10., s_bk_np + 10., c_np])
    with pytest.raises(AssertionError):
        assert_allclose(y_np, y_np_2_different_s, atol=1e-4)

    # test flat list inputs
    with CustomObjectScope(custom_objects):
        layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))
    y = layer([x, s_for, s_bac, c])
    model = Model([x, s_for, s_bac, c], y)
    model.set_weights(weights)
    y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np])
    assert_allclose(y_np, y_np_3, atol=1e-4)


def test_Bidirectional_trainable():
    # test layers that need learning_phase to be set
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(layers.SimpleRNN(3))
    _ = layer(x)
    assert len(layer.trainable_weights) == 6
    layer.trainable = False
    assert len(layer.trainable_weights) == 0
    layer.trainable = True
    assert len(layer.trainable_weights) == 6


def test_Bidirectional_updates():
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(layers.SimpleRNN(3))
    assert len(layer.updates) == 0
    assert len(layer.get_updates_for(None)) == 0
    assert len(layer.get_updates_for(x)) == 0
    layer.forward_layer.add_update(0, inputs=x)
    layer.forward_layer.add_update(1, inputs=None)
    layer.backward_layer.add_update(0, inputs=x)
    layer.backward_layer.add_update(1, inputs=None)
    assert len(layer.updates) == 4
    assert len(layer.get_updates_for(None)) == 2
    assert len(layer.get_updates_for(x)) == 2


def test_Bidirectional_losses():
    x = Input(shape=(3, 2))
    layer = wrappers.Bidirectional(
        layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1'))
    _ = layer(x)
    assert len(layer.losses) == 4
    assert len(layer.get_losses_for(None)) == 4
    assert len(layer.get_losses_for(x)) == 0
    layer.forward_layer.add_loss(0, inputs=x)
    layer.forward_layer.add_loss(1, inputs=None)
    layer.backward_layer.add_loss(0, inputs=x)
    layer.backward_layer.add_loss(1, inputs=None)
    assert len(layer.losses) == 8
    assert len(layer.get_losses_for(None)) == 6
    assert len(layer.get_losses_for(x)) == 2


if __name__ == '__main__':
    pytest.main([__file__])
model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input =", "= model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x,", "i in range(3): assert np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert mask_outputs[-1] is None # final layer", "np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has one mapping from inputs to reshaped", "RNNs y_merged = y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for", "self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform',", "= 2 dim = 5 timesteps = 3 units = 3 input1 =", "wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(", "len(y_expected) + n_states * 2 for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2,", "RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend') def test_TimeDistributed_learning_phase(): # test", "x_np = np.random.random((6, 5, 5)) s_fw_np = np.random.random((6, 32)) s_bk_np = np.random.random((6, 32))", "= Model(x, y) y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y), 0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend()", "batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input],", "self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform',", "2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet')", "[ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in range(3): assert np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert mask_outputs[-1] is", "build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape, constant_shape] =", "2 * output_dim if mode == 'concat' else output_dim y = np.random.random((samples, target_dim))", "shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel =", "_ = layer(x) assert len(layer.trainable_weights) == 6 layer.trainable = False assert len(layer.trainable_weights) ==", "dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1)", "samples = 2 dim = 5 timesteps = 3 units = 3 X", "s_bac, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 32)),", "model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) # the shape so far: (N, t_1, t_2,", "test_TimeDistributed_trainable(): # test layers that need learning_phase to be set x = Input(shape=(3,", "functional API inputs = Input((timesteps, dim)) 
outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model", "y_rev: (y + y_rev) / 2 elif merge_mode == 'concat': merge_func = lambda", "= np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config()", "model.set_weights(weights) y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # verify that", "= Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) == 6", "'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM samples = 2 dim = 5", "model.compile(optimizer='rmsprop', loss='mse') # Assert that mean and variance are 0 and 1. td", "layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1,", "5))) model = model_from_json(model.to_json()) model.summary() # test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2),", "layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop', loss='mse')", "yet') def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units self.state_size", "and 1. td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1]))", "assert len(layer.trainable_weights) == 6 layer.trainable = False assert len(layer.trainable_weights) == 0 layer.trainable =", "cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units", "for i in range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1,", "wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 layer.trainable", "np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic case", "# test layers that need learning_phase to be set x = Input(shape=(3, 2))", "= {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model", "in range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs", "ref_mask_val[i]) assert mask_outputs[-1] is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend", "layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert len(layer.get_losses_for(None)) == 6 assert", "output_dim if mode == 'concat' else output_dim y = np.random.random((samples, target_dim)) # test", "custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units", "elif merge_mode == 'mul': merge_func = lambda y, y_rev: y * y_rev elif", "* 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn, state_inner in zip(y_merged,", "with Embedding model = Sequential() 
model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse')", "loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)), epochs=1, batch_size=10) # test config model.get_config()", "units = 3 input1 = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state", "from keras.utils.generic_utils import object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "x in outputs) inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode)", "= Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10,", "timesteps = 3 units = 3 X = [np.random.rand(samples, timesteps, dim)] if merge_mode", "initial_state: passing a tensor input2 = Input((timesteps, dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2,", "= layer(x) assert len(layer.losses) == 4 assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) ==", "model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)), np.random.random((1, 2, 4, 4, 5))) model", "model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test wrapping Sequential model", "model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10) # compare", "# test wrapping Sequential model model = Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential()", "5)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights() config", "self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output = h_input +", "len(layer.layer.states) y_merged = f_merged(X) y_forward = f_forward(X) y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0],", "np.random.random((6, 5, 5)) s_fw_np = np.random.random((6, 32)) s_bk_np = np.random.random((6, 32)) c_np =", "assert all(not getattr(x, '_uses_learning_phase') for x in outputs) inputs = Input((timesteps, dim)) wrapped", "weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse')", "wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:] # test passing invalid initial_state: passing a", "size=(10, 3, 4), dtype='int32') for i in range(4): model_input[i, i:, i:] = 0", "flat list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, s_for, s_bac,", "unspecified shape and Embeddings with mask_zero model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None,", "test layers that need learning_phase to be set np.random.seed(1234) x = Input(shape=(3, 2))", "= 2 timesteps = 2 output_dim = 2 dropout_rate = 0.2 for mode", "'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') def test_TimeDistributed(): #", "> 0 # embedding layer ref_mask_val_1 = 
ref_mask_val_0 # first RNN layer ref_mask_val_2", "for x in outputs) inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True),", "batch_size=1) # test with functional API inputs = Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim,", "basic case inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged =", "layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop',", "= layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop',", "assert len(model.losses) == 1 def test_Bidirectional(): rnn = layers.SimpleRNN samples = 2 dim", "4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None)", "== 'sum': merge_func = lambda y, y_rev: y + y_rev elif merge_mode ==", "for x in outputs) model = Model(inputs, outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X))", "does not support custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def __init__(self,", "backend does not support custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def", "y_merged = f_merged(X) y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert len(y_merged) == len(y_expected) for x1,", "= wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac,", "model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses)", "passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output) assert", "= units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape,", "= Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward", "dropout_rate = 0.2 for mode in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim))", "y = layer([x, s_for, s_bac, c]) model = Model([x, s_for, s_bac, c], y)", "custom RNN cell yet') def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units", "layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates) == 4 assert", "64)) ) # Test basic case serialization. 
x_np = np.random.random((6, 5, 5)) c_np", "t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1,", "outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3,", "np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference =", "epochs=1, batch_size=10) # test wrapping Sequential model model = Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model", "test with functional API inputs = Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate),", "= 2 dropout_rate = 0.2 for mode in ['sum', 'concat']: x = np.random.random((samples,", "loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4), dtype='int32') for i in range(4):", "x = Input((5, 5)) c = Input((3,)) s_for = Input((32,)) s_bac = Input((32,))", "units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list):", "len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x)", "# second RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in range(3):", "in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does", "reason='MXNet backend does not support custom RNN cell yet') def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer):", "2 @pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'), reason='Unknown timestamps for RNN not", "2 dim = 5 timesteps = 3 units = 3 input1 = Input((timesteps,", "= Model(inputs, outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X)) for x1,", "= 3 input1 = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state =", "underlying RNNs y_merged = y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:]", "training=True)) assert all(not getattr(x, '_uses_learning_phase') for x in outputs) inputs = Input((timesteps, dim))", "layer = wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)])", "= model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu'))", "x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) ==", "= model.predict([x_np, s_fw_np + 10., s_bk_np + 10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s,", "with Masking layer model = Sequential() 
model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input", "input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) == 1 def test_Bidirectional(): rnn =", "inputs = Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs,", "layers that need learning_phase to be set np.random.seed(1234) x = Input(shape=(3, 2)) y", "y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X)) for x1, x2 in zip(y1, y2): assert_allclose(x1,", "loss='mse') model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'), np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10) #", "Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward =", "a tensor input2 = Input((timesteps, dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) #", "invalid initial_state: passing a tensor input2 = Input((timesteps, dim)) with pytest.raises(ValueError): output =", "3 input1 = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:]", "base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Test basic case. x", "pytest import numpy as np import copy from numpy.testing import assert_allclose from keras.utils", "c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 64)) )", "np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not", "2 dim = 5 timesteps = 3 units = 3 X = [np.random.rand(samples,", "reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2,", "0 assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.updates) == 2 assert", "def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape and Embeddings with mask_zero model =", "inputs = Input((None, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs,", "reference_output, atol=1e-05) # test with Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3,", "None))) # the shape so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8,", "Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert", "# test config model.get_config() model = model_from_json(model.to_json()) model.summary() # test stacked bidirectional layers", "assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 layer.trainable = False assert len(layer.updates)", "np.random.random((samples, timesteps, dim)) 
target_dim = 2 * output_dim if mode == 'concat' else", "batch_size=1) # Bidirectional and stateful inputs = Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim,", "c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def", "4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)), np.random.random((1, 2, 4,", "s_for, s_bac, c]) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_3 =", "np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10,", "layer([x, c]) model = Model([x, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, c_np]) assert_allclose(y_np,", "assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3,", "f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X) y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert", "3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)), np.random.random((1, 2, 4, 4,", "dim = 5 timesteps = 3 units = 3 X = [np.random.rand(samples, timesteps,", "= [np.random.rand(samples, timesteps, dim)] if merge_mode == 'sum': merge_func = lambda y, y_rev:", "ref_mask_val_0 = model_input > 0 # embedding layer ref_mask_val_1 = ref_mask_val_0 # first", "= Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) # the shape so far: (N,", "atol=1e-5) @pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat',", "x1, x2 in zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse(): rnn = layers.LSTM", "RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second RNN layer ref_mask_val = [ref_mask_val_0,", "32)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np]) weights =", "axis=-1)) def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop',", "= func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model", "model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, constants=c)", "np.broadcast_to(np.array([0, 1]), (1, 10, 2))) # Assert that mean and variance changed. 
assert", "6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'), np.random.random((10,", "import wrappers, Input, Layer from keras.layers import RNN from keras import layers from", "0 layer.trainable = True assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend()", "stateful inputs = Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model =", "model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10, 3, 4), dtype='int32'),", "= wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert len(layer.losses) == 4 assert", "1])) # Verify input_map has one mapping from inputs to reshaped inputs. uid", "with mask_zero model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) # the shape", "to be set x = Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x)", "np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1,", "= to_list(model.predict(X)) y2 = to_list(model.predict(X)) for x1, x2 in zip(y1, y2): assert_allclose(x1, x2,", "backend') def test_TimeDistributed_learning_phase(): # test layers that need learning_phase to be set np.random.seed(1234)", "the concatenation of the underlying RNNs y_merged = y_merged[-n_states * 2:] y_forward =", "constants=c) model = Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5,", "are 0 and 1. td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3],", "functional API x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model = Model(x, y)", "np.random.random((6, 5, 5)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights =", "'mxnet', reason='MXNet backend does not support custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class", "model_input[i, i:, :] = 0. 
model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6)", "2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x, y) y = model.predict(np.random.random((10, 3,", "input_shape=(None, None))) # the shape so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True)))", "assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential()", "4)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test wrapping Sequential model model =", "'theano' or K.backend() == 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode):", "== 4 assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x", "assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None)) == 1 assert", "s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6,", "else: merge_func = lambda y, y_rev: [y, y_rev] # basic case inputs =", "# test when specifying a batch_input_shape test_input = np.random.random((1, 3, 4)) test_output =", "inputs, states, constants): [prev_output] = states [constant] = constants h_input = K.dot(inputs, self.input_kernel)", "inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert len(layer.get_losses_for(None)) == 6", "+ 10., s_bk_np + 10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test", "len(td._input_map.keys()) == 1 assert uid in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend()", "constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6,", "True def call(self, inputs, states, constants): [prev_output] = states [constant] = constants h_input", "RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model", "assert_allclose(x1, x2, atol=1e-5) # test if the state of a BiRNN is the", "Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)),", "= layer([x, s_for, s_bac, c]) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights)", "3, 4, 6)), epochs=1, batch_size=10) # compare to not using batch_input_shape test_input =", "# test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4,", "= wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2", "y_backward[0])) assert len(y_merged) == len(y_expected) + n_states * 2 for x1, x2 in", "Assert that mean and variance are 0 and 1. 
td = model.layers[0] assert", "wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output) assert len(model.layers) == 4 assert isinstance(model.layers[-1].input,", "wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert all(not getattr(x, '_uses_learning_phase') for", "with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects): layer", "# Test basic case. x = Input((5, 5)) c = Input((3,)) s_for =", "constants): [prev_output] = states [constant] = constants h_input = K.dot(inputs, self.input_kernel) h_state =", "== 'concat' else output_dim y = np.random.random((samples, target_dim)) # test with Sequential model", "= wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True)) assert all(not getattr(x, '_uses_learning_phase')", "2, 4, 4, 3)), np.random.random((1, 2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary()", "target_dim = 2 * output_dim if mode == 'concat' else output_dim y =", "scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert that mean and variance are", "= f_merged(X) y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert len(y_merged) == len(y_expected) for x1, x2", "model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6,", "inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, s_for, s_bac, c]) model", "assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs =", "s_for, s_bac, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_3,", "== 8 assert len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x)) == 2 if __name__ ==", "@pytest.mark.skipif(K.backend() == 'theano' or K.backend() == 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None])", "reason='Flaky with CNTK backend') def test_TimeDistributed_learning_phase(): # test layers that need learning_phase to", "reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples =", "atol=1e-4) def test_Bidirectional_trainable(): # test layers that need learning_phase to be set x", "4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4)) for i", "config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) #", "wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c],", "uid in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend", "= y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn, state_inner", "4, 5))) model = 
model_from_json(model.to_json()) model.summary() # test stacked layers model = Sequential()", "Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1)", "outputs) model = Model(inputs, outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X))", "= Input((32,)) s_bac = Input((32,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with", "3 units = 3 input1 = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True))", "(y + y_rev) / 2 elif merge_mode == 'concat': merge_func = lambda y,", "f_merged(X) y_forward = f_forward(X) y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0])) assert len(y_merged)", "wrappers.TimeDistributed(model)(x) outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)),", "= Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1,", "if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape, constant_shape] = input_shape #", "c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) #", "test layers that need learning_phase to be set x = Input(shape=(3, 2)) layer", "kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) ==", "y2 = to_list(model.predict(X)) for x1, x2 in zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def", "len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend() == 'cntk' or K.backend() ==", "assert len(layer.get_losses_for(None)) == 6 assert len(layer.get_losses_for(x)) == 2 if __name__ == '__main__': pytest.main([__file__])", "= np.random.random((6, 5, 5)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights", "== 'mul': merge_func = lambda y, y_rev: y * y_rev elif merge_mode ==", "np.random.random((10, 3, 4, 6)), epochs=1, batch_size=10) # compare to not using batch_input_shape test_input", "len(model.layers) == 4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps,", "(and should) raise if more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1],", "dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x,", "# test with Masking layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop',", "a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output) assert len(model.layers)", "+ y_rev elif merge_mode == 'mul': merge_func = lambda y, y_rev: y *", "h_const 
return output, [output] def get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants,", "support TimeDistributed and RNN yet') def test_TimeDistributed_trainable(): # test layers that need learning_phase", "activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) == 1 def test_Bidirectional(): rnn", "config model.get_config() # test when specifying a batch_input_shape test_input = np.random.random((1, 3, 4))", "backend does not support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with", "y = layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y)", "BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') #", "that need learning_phase to be set np.random.seed(1234) x = Input(shape=(3, 2)) y =", "= wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward", "assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0,", "= Model([x, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend()", "from keras.layers import wrappers, Input, Layer from keras.layers import RNN from keras import", "second RNN layer ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in range(3): assert", "timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend", "False assert len(layer.updates) == 0 assert len(layer.trainable_weights) == 0 layer.trainable = True assert", "len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates)", "reference = Sequential() reference.add(wrappers.TimeDistributed(layers.Dense(2), batch_input_shape=(1, 3, 4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output =", "with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, s_for, s_bac, c]) model =", "assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses) == 1 model", "= 3 X = [np.random.rand(samples, timesteps, dim)] if merge_mode == 'sum': merge_func =", "support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend') def", "== 2 layer.trainable = False assert len(layer.updates) == 0 assert len(layer.trainable_weights) == 0", "merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # test", "3 X = [np.random.rand(samples, timesteps, dim)] inputs = Input((timesteps, dim)) wrapped = 
wrappers.Bidirectional(rnn(units,", "= wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs)) assert all(x._uses_learning_phase for x in", "2 elif merge_mode == 'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1)", "for RNN not supported in CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with", "rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet')", "dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom", "specifying a batch_input_shape test_input = np.random.random((1, 3, 4)) test_output = model.predict(test_input) weights =", "reshaped inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 assert uid in td._input_map", "to be set np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model", "model.set_weights(weights) y_np_2 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # test flat list inputs", "not support TimeDistributed and RNN yet') def test_TimeDistributed_trainable(): # test layers that need", "assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x)) == 2 def test_Bidirectional_losses(): x = Input(shape=(3,", "y = layer(x, constants=c) model = Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np,", "RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units =", "backend does not support TimeDistributed and RNN yet') def test_TimeDistributed_with_masking_layer(): # test with", "training=True) model = Model(x, y) y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y), 0., atol=1e-1,", "epochs=1, batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func =", "4 assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None)", "to_list(wrapped(inputs, training=True)) assert all(not getattr(x, '_uses_learning_phase') for x in outputs) inputs = Input((timesteps,", "+ 10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs", "assert len(y_merged) == len(y_expected) + n_states * 2 for x1, x2 in zip(y_merged,", "X = [np.random.rand(samples, timesteps, dim)] if merge_mode == 'sum': merge_func = lambda y,", "= model_from_json(model.to_json()) model.summary() # test stacked bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True),", "x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test if the state", "# test with Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32'))", "should) raise if more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units),", "5, 5)) s_fw_np = np.random.random((6, 32)) s_bk_np = np.random.random((6, 32)) c_np = np.random.random((6,", "5, 5)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) 
weights = model.get_weights()", "epochs=1, batch_size=10) # test with functional API x = Input(shape=(3, 2)) y =", "y_rev: [y, y_rev] # basic case inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units,", "y_merged = f_merged(X) y_forward = f_forward(X) y_backward = f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0]))", "padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)),", "= lambda y, y_rev: y * y_rev elif merge_mode == 'ave': merge_func =", "model.summary() # test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu'))", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5,", "1 model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert", "['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples = 2 dim =", "case. x = Input((5, 5)) c = Input((3,)) s_for = Input((32,)) s_bac =", "1]), (1, 10, 2))) # Assert that mean and variance changed. assert not", "def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units self.state_size =", "or K.backend() == 'mxnet', reason='Not supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn", "merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func = lambda y,", "ref_mask_val = [ref_mask_val_0, ref_mask_val_1, ref_mask_val_2] for i in range(3): assert np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert", "3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with BatchNormalization model =", "model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test", "with dynamic length rnn = layers.SimpleRNN samples = 2 dim = 2 timesteps", "Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert that mean", "test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape and Embeddings with mask_zero model = Sequential()", "c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet',", "(2, 2), padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4,", "lambda y, y_rev: (y + y_rev) / 2 elif merge_mode == 'concat': merge_func", "= reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5,", "RNN yet') def test_TimeDistributed(): # first, test with Dense layer model = 
Sequential()", "assert len(layer.updates) == 2 assert len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend() == 'cntk' or K.backend()", "2), padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4,", "y, y_rev: (y + y_rev) / 2 elif merge_mode == 'concat': merge_func =", "layer.trainable = False assert len(layer.updates) == 0 assert len(layer.trainable_weights) == 0 layer.trainable =", "= 3 units = 3 input1 = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True,", "loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with functional", "MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified shape and Embeddings with mask_zero model", "timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd')", "numpy.testing import assert_allclose from keras.utils import CustomObjectScope from keras.layers import wrappers, Input, Layer", "test_Bidirectional_losses(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ =", "constants=c) model = Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2,", "assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0,", "Input((32,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell))", "lambda y, y_rev: y + y_rev elif merge_mode == 'mul': merge_func = lambda", "K.function([inputs], layer.backward_layer.call(inputs)) n_states = len(layer.layer.states) y_merged = f_merged(X) y_forward = f_forward(X) y_backward =", "Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6, mask_zero=True), input_shape=(None, None))) # the shape so far: (N, t_1,", "0. 
model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs", "zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test return_state inputs = Input((timesteps, dim)) layer", "mode in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim)) target_dim = 2 *", "y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn, state_inner in", "loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with BatchNormalization", "TimeDistributed and RNN yet') def test_TimeDistributed(): # first, test with Dense layer model", "model model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode, input_shape=(timesteps, dim))) model.compile(loss='mse', optimizer='sgd') model.fit(x,", "np.concatenate((y, y_rev), axis=-1) else: merge_func = lambda y, y_rev: [y, y_rev] # basic", "axis=-1) else: merge_func = lambda y, y_rev: [y, y_rev] # basic case inputs", "shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs, states, constants): [prev_output]", "2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary() # test stacked layers model", "layer(x, initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_2", "layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert len(layer.losses) == 4 assert len(layer.get_losses_for(None)) ==", "mean and variance changed. assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1,", "= Input((timesteps, dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage:", "K from keras.utils.generic_utils import object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not", "CNTK backend') def test_TimeDistributed_learning_phase(): # test layers that need learning_phase to be set", "(None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN", "= layer(x) assert len(layer.trainable_weights) == 6 layer.trainable = False assert len(layer.trainable_weights) == 0", "= K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X) y_expected =", "set x = Input(shape=(3, 2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates)", "3))], np.zeros((6, 64)) ) # Test basic case serialization. x_np = np.random.random((6, 5,", "supported.') @pytest.mark.parametrize('merge_mode', ['sum', 'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples = 2", "raise TypeError('expects constants shape') [input_shape, constant_shape] = input_shape # will (and should) raise", "used y_np_2_different_s = model.predict([x_np, s_fw_np + 10., s_bk_np + 10., c_np]) with pytest.raises(AssertionError):", "5, 5)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic case serialization. 
x_np", "model_from_json from keras import backend as K from keras.utils.generic_utils import object_list_uid, to_list @pytest.mark.skipif(K.backend()", "f_backward(X) y_expected = to_list(merge_func(y_forward[0], y_backward[0])) assert len(y_merged) == len(y_expected) + n_states * 2", "in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim)) target_dim = 2 * output_dim", "timestamps not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with functional API with", "layers.LSTM samples = 2 dim = 5 timesteps = 3 units = 3", "with CNTK backend') def test_TimeDistributed_learning_phase(): # test layers that need learning_phase to be", "of a BiRNN is the concatenation of the underlying RNNs y_merged = y_merged[-n_states", "= K.function([model.input], mask_outputs) mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input,", "model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert that mean and", "= 5 timesteps = 3 units = 3 input1 = Input((timesteps, dim)) layer", "@pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend') def test_TimeDistributed_learning_phase(): # test layers that", "# test if the state of a BiRNN is the concatenation of the", "has one mapping from inputs to reshaped inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys())", "with functional API inputs = Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs)", "input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert that mean and variance are 0 and", "= Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0 assert len(layer.get_updates_for(None)) ==", "atol=1e-05) # test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2,", "model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, initial_state=[s_for,", "bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim), merge_mode=mode)) model.compile(loss='mse',", "== len(y_expected) + n_states * 2 for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1,", "model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # Bidirectional and stateful inputs = Input(batch_shape=(1,", "Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) #", "merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum',", "inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates) == 4 assert len(layer.get_updates_for(None))", "test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, 
kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert", "model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4))", "model_input > 0 # embedding layer ref_mask_val_1 = ref_mask_val_0 # first RNN layer", "not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has", "== 6 layer.trainable = False assert len(layer.trainable_weights) == 0 layer.trainable = True assert", "TimeDistributed and RNN yet') def test_TimeDistributed_with_masking_layer(): # test with Masking layer model =", "assert_allclose(y_np, y_np_2, atol=1e-4) # verify that state is used y_np_2_different_s = model.predict([x_np, s_fw_np", "model = Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4)", "layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x) layer.backward_layer.add_loss(1, inputs=None) assert len(layer.losses) == 8 assert len(layer.get_losses_for(None)) ==", "model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs = [model.layers[0].compute_mask(model.input)] mask_outputs += [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])]", "zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse(): rnn = layers.LSTM samples = 2", "'concat' else output_dim y = np.random.random((samples, target_dim)) inputs = Input((None, dim)) outputs =", "samples = 2 dim = 5 timesteps = 3 units = 3 input1", "s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # verify that state is used y_np_2_different_s", "np.random.random((1, 2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary() # test stacked layers", "1 assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses) == 1", "{'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Test basic", "= Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)),", "tensor input2 = Input((timesteps, dim)) with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test", "= {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for, s_bac],", "state = layer(input1)[1:] # test passing invalid initial_state: passing a tensor input2 =", "rnn = layers.SimpleRNN samples = 2 dim = 2 timesteps = 2 output_dim", "= 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)] for layer in", "mapping from inputs to reshaped inputs. 
uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1", "learning_phase to be set np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True)", "outer_model = Model(x, y) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1,", "high=5, size=(10, 3, 4)) for i in range(4): model_input[i, i:, :] = 0.", "4)), np.random.random((10, 3, 2)), epochs=1, batch_size=10) # test config model.get_config() # test when", "one mapping from inputs to reshaped inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) ==", "assert mask_outputs[-1] is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does", "model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3, 2)), epochs=1, batch_size=10) # test", "layer(x) assert len(layer.losses) == 4 assert len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0", "return_state inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs],", "y_np_2 = model.predict([x_np, s_fw_np, s_bk_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # verify that state", "test with functional API x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model =", "0.2 for mode in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim)) target_dim =", "c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y", "output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output) assert len(model.layers) == 4", "model.predict([x_np, c_np]) assert_allclose(y_np, y_np_2, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects): layer", "# Test basic case serialization. 
x_np = np.random.random((6, 5, 5)) s_fw_np = np.random.random((6,", "np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2,", "layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2))) model.compile(optimizer='rmsprop', loss='mse') # Assert that mean and variance", "Input((3,)) s_for = Input((32,)) s_bac = Input((32,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants':", "all(x._uses_learning_phase for x in outputs) model = Model(inputs, outputs) assert model.uses_learning_phase y1 =", "outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with BatchNormalization model", "and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend') def test_TimeDistributed_learning_phase(): #", "3 units = 3 X = [np.random.rand(samples, timesteps, dim)] inputs = Input((timesteps, dim))", "assert_allclose(test_output, reference_output, atol=1e-05) # test with Conv2D model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2),", "Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward =", "def test_Bidirectional_dynamic_timesteps(): # test with functional API with dynamic length rnn = layers.SimpleRNN", "model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) # test with functional API inputs =", "len(layer.updates) == 0 assert len(layer.get_updates_for(None)) == 0 assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x)", "supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with functional API with dynamic length", "np import copy from numpy.testing import assert_allclose from keras.utils import CustomObjectScope from keras.layers", "mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0 = model_input > 0 # embedding layer ref_mask_val_1", "np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x, y)", "np.random.random((6, 3)) y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np]) weights = model.get_weights() config =", "assert len(y_merged) == len(y_expected) for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5)", "dict(list(base_config.items()) + list(config.items())) # Test basic case. 
x = Input((5, 5)) c =", "+ n_states * 2 for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5)", "Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:] # test passing", "constants h_input = K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel)", "getattr(x, '_uses_learning_phase') for x in outputs) inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units,", "3, 4)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test wrapping Sequential model model", "== 'concat' else output_dim y = np.random.random((samples, target_dim)) inputs = Input((None, dim)) outputs", "== 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') def test_TimeDistributed_trainable():", "need learning_phase to be set x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _", "list(config.items())) # Test basic case. x = Input((5, 5)) c = Input((3,)) s_for", "0 layer.trainable = True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3,", "from inputs to reshaped inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 assert", "= K.dot(inputs, self.input_kernel) h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output =", "RNN not supported in CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test with unspecified", "h_state = K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output = h_input + h_state", "y_rev: y * y_rev elif merge_mode == 'ave': merge_func = lambda y, y_rev:", "Model(inputs, outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2 = to_list(model.predict(X)) for x1, x2", "for state_birnn, state_inner in zip(y_merged, y_forward + y_backward): assert_allclose(state_birnn, state_inner, atol=1e-5) @pytest.mark.skipif(K.backend() ==", "batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): #", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10,", "model.predict([x_np, c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "assert len(layer.get_updates_for(x)) == 0 layer.forward_layer.add_update(0, inputs=x) layer.forward_layer.add_update(1, inputs=None) layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None) assert", "K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0 = model_input > 0 # embedding layer", "= True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer", "with functional API with dynamic length rnn = layers.SimpleRNN samples = 2 dim", "dim)) target_dim = 2 * output_dim if mode == 'concat' else output_dim y", "= Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave',", "list inputs with CustomObjectScope(custom_objects): layer = 
wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model =", "@pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') def", "model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None))", "from keras.utils import CustomObjectScope from keras.layers import wrappers, Input, Layer from keras.layers import", "test with Dense layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse')", "test stacked bidirectional layers model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, return_sequences=True), merge_mode=mode, input_shape=(timesteps, dim))) model.add(wrappers.Bidirectional(rnn(output_dim),", "4))) reference.add(layers.Activation('relu')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test", "assert len(layer.updates) == 0 assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.updates)", "timestamps for RNN not supported in CNTK and MXNet.') def test_TimeDistributed_with_masked_embedding_and_unspecified_shape(): # test", "reason='Unknown timestamps not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with functional API", "np.random.random((10, 3, 2)), epochs=1, batch_size=10) # test config model.get_config() # test when specifying", "= Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) ==", "# test with functional API x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model", "units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants", "model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))", "target_dim)) inputs = Input((None, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model =", "= Input(shape=(3, 2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert", "reason='MXNet backend does not support TimeDistributed and RNN yet') def test_TimeDistributed_trainable(): # test", "model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def", "= wrappers.Bidirectional(RNN(cell)) y = layer(x, constants=c) model = Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse')", "layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs))", "assert_allclose(y_np, y_np_3, atol=1e-4) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support custom RNN", "True assert 
len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer =", "== 'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func =", "2 timesteps = 2 output_dim = 2 dropout_rate = 0.2 for mode in", "i in range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10)", "Sequential model model = Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2)))", "a BiRNN is the concatenation of the underlying RNNs y_merged = y_merged[-n_states *", "recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) #", "# test with BatchNormalization model = Sequential() model.add(wrappers.TimeDistributed( layers.BatchNormalization(center=True, scale=True), name='bn', input_shape=(10, 2)))", "np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test wrapping Sequential model model = Sequential()", "def test_TimeDistributed(): # first, test with Dense layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3,", "initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1],", "K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X) y_expected = to_list(merge_func(f_forward(X)[0],", "is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1,", "yet') def test_TimeDistributed(): # first, test with Dense layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2),", "# test stacked layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop',", "final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN", "assert uid in td._input_map assert K.int_shape(td._input_map[uid]) == (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet", "model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn", "merge_mode == 'mul': merge_func = lambda y, y_rev: y * y_rev elif merge_mode", "'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func = lambda", "5)) c = Input((3,)) cell = RNNCellWithConstants(32) custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects):", "assert np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert mask_outputs[-1] is None # final layer @pytest.mark.skipif(K.backend() == 'mxnet',", "test with Sequential model model = Sequential() model.add(wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), 
merge_mode=mode, input_shape=(timesteps, dim)))", "== 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') def test_TimeDistributed():", "if mode == 'concat' else output_dim y = np.random.random((samples, target_dim)) inputs = Input((None,", "4), dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test", "wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert len(layer.trainable_weights) == 6 layer.trainable = False assert len(layer.trainable_weights)", "inputs. uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 assert uid in td._input_map assert", "model.fit(model_input, np.random.random((10, 1)), epochs=1, batch_size=10) mask_outputs = [model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input,", "set np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x,", "if more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel')", "2)) layer = wrappers.TimeDistributed(layers.BatchNormalization()) _ = layer(x) assert len(layer.updates) == 2 assert len(layer.trainable_weights)", "return output, [output] def get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config()", "wrappers.Bidirectional(rnn(units, return_sequences=True), merge_mode=merge_mode) f_merged = K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward =", "Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10, 2))) # Assert", "layer.trainable = True assert len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3, 2))", "# test valid usage: passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model =", "func([model_input]) ref_mask_val_0 = model_input > 0 # embedding layer ref_mask_val_1 = ref_mask_val_0 #", "atol=1e-05) # test with Embedding model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4),", "be set np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model =", "for x1, x2 in zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse(): rnn =", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Conv2D(5, (2, 2), padding='same'), input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop',", "= super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) # Test basic case. 
x =", "layer.backward_layer.add_update(0, inputs=x) layer.backward_layer.add_update(1, inputs=None) assert len(layer.updates) == 4 assert len(layer.get_updates_for(None)) == 2 assert", "dim = 2 timesteps = 2 output_dim = 2 dropout_rate = 0.2 for", "assert all(x._uses_learning_phase for x in outputs) model = Model(inputs, outputs) assert model.uses_learning_phase y1", "3)), epochs=1, batch_size=10) # test wrapping Sequential model model = Sequential() model.add(layers.Dense(3, input_dim=2))", "# test return_state inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged", "10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs with", "= K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0 = model_input > 0 # embedding", "y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown timestamps not supported in CNTK.') def", "= self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel')", "not np.array_equal(td.get_weights()[3], np.array([1, 1])) # Verify input_map has one mapping from inputs to", "self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs, states, constants):", "zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test if the state of a BiRNN", "# Bidirectional and stateful inputs = Input(batch_shape=(1, timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True),", "reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Conv2D", "dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs)) assert all(x._uses_learning_phase for", "10., s_bk_np + 10., c_np]) with pytest.raises(AssertionError): assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat", "layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet')", "[y, y_rev] # basic case inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_sequences=True),", "outputs = wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y,", "variance changed. assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3], np.array([1, 1])) #", "# test layers that need learning_phase to be set np.random.seed(1234) x = Input(shape=(3,", "n_states = len(layer.layer.states) y_merged = f_merged(X) y_forward = f_forward(X) y_backward = f_backward(X) y_expected", "y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn =", "Test basic case. 
x = Input((5, 5)) c = Input((3,)) cell = RNNCellWithConstants(32)", "len(layer.trainable_weights) == 2 @pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'), reason='Unknown timestamps for", "[model.layers[0].compute_mask(model.input)] for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val =", "len(model.layers[0].layer.losses) == 1 assert len(model.layers[0].losses) == 1 assert len(model.layers[0].get_losses_for(None)) == 1 assert len(model.losses)", "test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0 assert", "optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode):", "using batch_input_shape test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output = model.predict(test_input) weights", "isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps, dim), np.random.rand(samples, timesteps, dim)] outputs = model.predict(inputs)", "y_expected): assert_allclose(x1, x2, atol=1e-5) # test return_state inputs = Input((timesteps, dim)) layer =", "@pytest.mark.parametrize('merge_mode', ['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM samples =", "list(config.items())) # Test basic case. x = Input((5, 5)) c = Input((3,)) cell", "= Input((timesteps, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate, recurrent_dropout=dropout_rate), merge_mode=mode)(inputs) model = Model(inputs, outputs)", "layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states = len(layer.layer.states) y_merged", "axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, kernel_regularizer='l1'),", "= h_input + h_state + h_const return output, [output] def get_config(self): config =", "assert_allclose(y_np, y_np_2_different_s, atol=1e-4) # test flat list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config))", "self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True", "= Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10,", "assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1,", "states, constants): [prev_output] = states [constant] = constants h_input = K.dot(inputs, self.input_kernel) h_state", "test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output = model.predict(test_input) weights = model.layers[0].get_weights()", "and variance are 0 and 1. 
td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0, 0]))", "import Sequential, Model, model_from_json from keras import backend as K from keras.utils.generic_utils import", "y, y_rev: y + y_rev elif merge_mode == 'mul': merge_func = lambda y,", "# Assert that mean and variance are 0 and 1. td = model.layers[0]", "not supported in CNTK.') def test_Bidirectional_dynamic_timesteps(): # test with functional API with dynamic", "with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c]) model = Model([x, c],", "= True def call(self, inputs, states, constants): [prev_output] = states [constant] = constants", "'mxnet', reason='MXNet backend does not support custom RNN cell yet') def test_Bidirectional_with_constants(): class", "== 'ave': merge_func = lambda y, y_rev: (y + y_rev) / 2 elif", "Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5))) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3,", "= False assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.trainable_weights) == 6", "= wrappers.TimeDistributed(layers.Dropout(.999))(x, training=True) model = Model(x, y) y = model.predict(np.random.random((10, 3, 2))) assert_allclose(np.mean(y),", "from keras.models import Sequential, Model, model_from_json from keras import backend as K from", "so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop',", "y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse(): rnn = layers.LSTM samples = 2 dim", "size=(10, 3, 4), dtype='int32') test_output = model.predict(test_input) weights = model.layers[0].get_weights() reference = Sequential()", "K.dot(prev_output, self.recurrent_kernel) h_const = K.dot(constant, self.constant_kernel) output = h_input + h_state + h_const", "= Input((3,)) s_for = Input((32,)) s_bac = Input((32,)) cell = RNNCellWithConstants(32) custom_objects =", "1. 
td = model.layers[0] assert np.array_equal(td.get_weights()[2], np.array([0, 0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1])) #", "test with functional API with dynamic length rnn = layers.SimpleRNN samples = 2", "2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn, state_inner in zip(y_merged, y_forward", "mask_zero=True), input_shape=(None, None))) # the shape so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7,", "= Sequential() model.add(wrappers.TimeDistributed( layers.Dense(2, activity_regularizer='l1'), input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.losses) ==", "layers model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3,", "in outputs) inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs", "if mode == 'concat' else output_dim y = np.random.random((samples, target_dim)) # test with", "layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states = len(layer.layer.states) y_merged = f_merged(X) y_forward =", "initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True def", "def call(self, inputs, states, constants): [prev_output] = states [constant] = constants h_input =", "= to_list(wrapped(inputs)) assert all(x._uses_learning_phase for x in outputs) model = Model(inputs, outputs) assert", "Assert that mean and variance changed. assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not", "inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs))", "inputs to reshaped inputs. 
uid = object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 assert uid", "merge_mode == 'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func", "def test_Bidirectional_trainable(): # test layers that need learning_phase to be set x =", "Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)), np.random.random((10, 3,", "+= [model.layers[1].compute_mask(model.layers[1].input, mask_outputs[-1])] func = K.function([model.input], mask_outputs) mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input,", "test_TimeDistributed_with_masking_layer(): # test with Masking layer model = Sequential() model.add(wrappers.TimeDistributed(layers.Masking(mask_value=0.,), input_shape=(None, 4))) model.add(wrappers.TimeDistributed(layers.Dense(5)))", "for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test return_state inputs", "= Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True, return_sequences=True)) state = layer(input1)[1:] # test", "initial_state=[s_for, s_bac], constants=c) model = Model([x, s_for, s_bac, c], y) model.set_weights(weights) y_np_2 =", "need learning_phase to be set np.random.seed(1234) x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(layers.Dropout(.999))(x,", "x in outputs) model = Model(inputs, outputs) assert model.uses_learning_phase y1 = to_list(model.predict(X)) y2", "layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert len(layer.losses) == 4", "RNN cell yet') def test_Bidirectional_with_constants(): class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units =", "mask_outputs_val = func([model_input]) assert np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers():", "== len(y_expected) for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test", "self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs,", "self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight(", "5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6, 64)) ) # Test basic", "object_list_uid(model.inputs) assert len(td._input_map.keys()) == 1 assert uid in td._input_map assert K.int_shape(td._input_map[uid]) == (None,", "reason='MXNet backend does not support custom RNN cell yet') def test_Bidirectional_with_constants_layer_passing_initial_state(): class RNNCellWithConstants(Layer):", "return_state=True), merge_mode=merge_mode) f_merged = K.function([inputs], layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs],", "== 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() ==", "input_shape=(3, 4))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') assert len(model.layers[0].layer.losses) == 1 assert 
len(model.layers[0].losses) == 1", "= K.function([inputs], to_list(layer(inputs))) f_forward = K.function([inputs], [layer.forward_layer.call(inputs)]) f_backward = K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged", "in zip(y1, y2): assert_allclose(x1, x2, atol=1e-5) def test_Bidirectional_state_reuse(): rnn = layers.LSTM samples =", "= Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1) @pytest.mark.skipif((K.backend() == 'cntk'), reason='Unknown", "test flat list inputs with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, c])", "model = Sequential() model.add(wrappers.TimeDistributed(layers.Dense(2), input_shape=(3, 4))) model.add(wrappers.TimeDistributed(layers.Dense(3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.random((10, 3, 4)),", "def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert len(layer.updates) == 0", "timesteps = 3 units = 3 X = [np.random.rand(samples, timesteps, dim)] inputs =", "len(layer.updates) == 0 assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.updates) ==", "= 0.2 for mode in ['sum', 'concat']: x = np.random.random((samples, timesteps, dim)) target_dim", "loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Embedding model", "layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input]) ref_mask_val_0", "dtype='int32') for i in range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10, 1)),", "(1, 10, 2))) # Assert that mean and variance changed. assert not np.array_equal(td.get_weights()[2],", "= Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6), batch_input_shape=(10, 3, 4), dtype='int32')) model.compile(optimizer='rmsprop', loss='mse') model.fit(np.random.randint(5, size=(10, 3,", "def test_TimeDistributed_trainable(): # test layers that need learning_phase to be set x =", "3)), np.random.random((1, 2, 4, 4, 5))) model = model_from_json(model.to_json()) model.summary() # test stacked", "call(self, inputs, states, constants): [prev_output] = states [constant] = constants h_input = K.dot(inputs,", "be set x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) _ = layer(x) assert", "np.array_equal(mask_outputs_val[0], np.any(model_input, axis=-1)) assert np.array_equal(mask_outputs_val[1], np.any(model_input, axis=-1)) def test_regularizers(): model = Sequential() model.add(wrappers.TimeDistributed(", "that mean and variance changed. 
assert not np.array_equal(td.get_weights()[2], np.array([0, 0])) assert not np.array_equal(td.get_weights()[3],", "**kwargs): self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if", "= 2 dim = 2 timesteps = 2 output_dim = 2 dropout_rate =", "with functional API x = Input(shape=(3, 2)) y = wrappers.TimeDistributed(model)(x) outer_model = Model(x,", "y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6, 3))],", "loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Conv2D model", "timesteps, dim)] outputs = model.predict(inputs) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support", "5 timesteps = 3 units = 3 input1 = Input((timesteps, dim)) layer =", "y, epochs=1, batch_size=1) # test with functional API inputs = Input((timesteps, dim)) outputs", "object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and RNN", "initial_state=state[0]) # test valid usage: passing a list output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model", "['sum', 'mul', 'ave', 'concat', None]) def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM samples = 2", "model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False))) model.add(layers.SimpleRNN(1, return_sequences=False)) model.compile(optimizer='rmsprop', loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4),", "dim)] inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs =", "keras.layers import wrappers, Input, Layer from keras.layers import RNN from keras import layers", "elif merge_mode == 'concat': merge_func = lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else:", "first RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second RNN layer ref_mask_val =", "0., atol=1e-1, rtol=1e-1) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and", "= Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, recurrent_dropout=0.2), merge_mode=merge_mode) outputs = to_list(wrapped(inputs, training=True))", "inputs=None) assert len(layer.updates) == 4 assert len(layer.get_updates_for(None)) == 2 assert len(layer.get_updates_for(x)) == 2", "does not support TimeDistributed and RNN yet') def test_TimeDistributed(): # first, test with", "== (None, 2) @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and", "K.function([inputs], [K.reverse(layer.backward_layer.call(inputs), 1)]) y_merged = f_merged(X) y_expected = to_list(merge_func(f_forward(X)[0], f_backward(X)[0])) assert len(y_merged) ==", "layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer(x, constants=c) model = Model([x, c], y) model.set_weights(weights)", "len(layer.get_losses_for(None)) == 4 assert len(layer.get_losses_for(x)) == 0 layer.forward_layer.add_loss(0, inputs=x) layer.forward_layer.add_loss(1, inputs=None) layer.backward_layer.add_loss(0, inputs=x)", "model.fit(x, y, epochs=1, batch_size=1) # test with functional API inputs = Input((timesteps, dim))", "output_dim y = np.random.random((samples, target_dim)) 
inputs = Input((None, dim)) outputs = wrappers.Bidirectional(rnn(output_dim, dropout=dropout_rate,", "inputs = Input((timesteps, dim)) wrapped = wrappers.Bidirectional(rnn(units, dropout=0.2, return_state=True), merge_mode=merge_mode) outputs = to_list(wrapped(inputs))", "= wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing a list output = wrappers.Bidirectional(rnn(units))(input2,", "Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1,", "@pytest.mark.skipif((K.backend() == 'cntk' or K.backend() == 'mxnet'), reason='Unknown timestamps for RNN not supported", "4), dtype='int32') for i in range(4): model_input[i, i:, i:] = 0 model.fit(model_input, np.random.random((10,", "input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10,", "= units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects", "input_shape=(2, 4, 4, 3))) model.add(layers.Activation('relu')) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.random.random((1, 2, 4, 4, 3)), np.random.random((1,", "x = Input(shape=(3, 2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x)", "y_merged = y_merged[-n_states * 2:] y_forward = y_forward[-n_states:] y_backward = y_backward[-n_states:] for state_birnn,", "# the shape so far: (N, t_1, t_2, 6) model.add(wrappers.TimeDistributed(layers.SimpleRNN(7, return_sequences=True))) model.add(wrappers.TimeDistributed(layers.SimpleRNN(8, return_sequences=False)))", "class RNNCellWithConstants(Layer): def __init__(self, units, **kwargs): self.units = units self.state_size = units super(RNNCellWithConstants,", "loss='mse') model_input = np.random.randint(low=1, high=5, size=(10, 3, 4)) for i in range(4): model_input[i,", "rnn = layers.LSTM samples = 2 dim = 5 timesteps = 3 units", "0])) assert np.array_equal(td.get_weights()[3], np.array([1, 1])) # Train model.train_on_batch(np.random.normal(loc=2, scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0,", "to not using batch_input_shape test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output =", "Sequential, Model, model_from_json from keras import backend as K from keras.utils.generic_utils import object_list_uid,", "return dict(list(base_config.items()) + list(config.items())) # Test basic case. 
x = Input((5, 5)) c", "layer(input1)[1:] # test passing invalid initial_state: passing a tensor input2 = Input((timesteps, dim))", "compare to not using batch_input_shape test_input = np.random.randint(5, size=(10, 3, 4), dtype='int32') test_output", "c_np = np.random.random((6, 3)) y_np = model.predict([x_np, s_fw_np, s_bk_np, c_np]) weights = model.get_weights()", "'concat', None]) def test_Bidirectional_dropout(merge_mode): rnn = layers.LSTM samples = 2 dim = 5", "f_merged = K.function([inputs], layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states", "model = Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse')", "size=(10, 3, 4)) for i in range(4): model_input[i, i:, :] = 0. model.compile(optimizer='rmsprop',", "wrappers.Bidirectional(rnn(output_dim, stateful=True), merge_mode=mode)(inputs) model = Model(inputs, outputs) model.compile(loss='mse', optimizer='sgd') model.fit(x, y, epochs=1, batch_size=1)", "x2, atol=1e-5) # test return_state inputs = Input((timesteps, dim)) layer = wrappers.Bidirectional(rnn(units, return_state=True),", "f_backward(X)[0])) assert len(y_merged) == len(y_expected) for x1, x2 in zip(y_merged, y_expected): assert_allclose(x1, x2,", "6, mask_zero=True), input_shape=(None, None))) # the shape so far: (N, t_1, t_2, 6)", "y = layer([x, c]) model = Model([x, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np,", "{'RNNCellWithConstants': RNNCellWithConstants} with CustomObjectScope(custom_objects): layer = wrappers.Bidirectional(RNN(cell)) y = layer(x, initial_state=[s_for, s_bac], constants=c)", "will (and should) raise if more than one constant passed self.input_kernel = self.add_weight(", "4)) for i in range(4): model_input[i, i:, :] = 0. model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input,", "ref_mask_val_2] for i in range(3): assert np.array_equal(mask_outputs_val[i], ref_mask_val[i]) assert mask_outputs[-1] is None #", "test with unspecified shape and Embeddings with mask_zero model = Sequential() model.add(wrappers.TimeDistributed(layers.Embedding(5, 6,", "outer_model.fit(np.random.random((10, 3, 2)), np.random.random((10, 3, 3)), epochs=1, batch_size=10) # test with functional API", "is used y_np_2_different_s = model.predict([x_np, s_fw_np + 10., s_bk_np + 10., c_np]) with", "# final layer @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed and", "= K.function([inputs], layer(inputs)) f_forward = K.function([inputs], layer.forward_layer.call(inputs)) f_backward = K.function([inputs], layer.backward_layer.call(inputs)) n_states =", "input2], output) assert len(model.layers) == 4 assert isinstance(model.layers[-1].input, list) inputs = [np.random.rand(samples, timesteps,", "reference.add(wrappers.TimeDistributed(layers.Embedding(5, 6), input_shape=(3, 4), dtype='int32')) reference.compile(optimizer='rmsprop', loss='mse') reference.layers[0].set_weights(weights) reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output,", "def test_Bidirectional_merged_value(merge_mode): rnn = layers.LSTM samples = 2 dim = 5 timesteps =", "range(4): model_input[i, i:, :] = 0. 
model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1,", "c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 32)), np.zeros((6,", "y_np_2, atol=1e-4) # verify that state is used y_np_2_different_s = model.predict([x_np, s_fw_np +", "scale=2, size=(1, 10, 2)), np.broadcast_to(np.array([0, 1]), (1, 10, 2))) # Assert that mean", "i:, :] = 0. model.compile(optimizer='rmsprop', loss='mse') model.fit(model_input, np.random.random((10, 3, 5)), epochs=1, batch_size=6) mask_outputs", "ref_mask_val_1 = ref_mask_val_0 # first RNN layer ref_mask_val_2 = np.any(ref_mask_val_1, axis=-1) # second", "keras.utils.generic_utils import object_list_uid, to_list @pytest.mark.skipif(K.backend() == 'mxnet', reason='MXNet backend does not support TimeDistributed", "3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config() with CustomObjectScope(custom_objects):", "layer = wrappers.Bidirectional.from_config(copy.deepcopy(config)) y = layer([x, s_for, s_bac, c]) model = Model([x, s_for,", "c_np]) assert_allclose(y_np, y_np_3, atol=1e-4) def test_Bidirectional_trainable(): # test layers that need learning_phase to", "not support TimeDistributed and RNN yet') @pytest.mark.skipif((K.backend() == 'cntk'), reason='Flaky with CNTK backend')", "== 0 assert len(layer.trainable_weights) == 0 layer.trainable = True assert len(layer.updates) == 2", "x2 in zip(y_merged, y_expected): assert_allclose(x1, x2, atol=1e-5) # test if the state of", "len(layer.trainable_weights) == 6 def test_Bidirectional_updates(): x = Input(shape=(3, 2)) layer = wrappers.Bidirectional(layers.SimpleRNN(3)) assert", "Input(shape=(3, 2)) layer = wrappers.Bidirectional( layers.SimpleRNN(3, kernel_regularizer='l1', bias_regularizer='l1')) _ = layer(x) assert len(layer.losses)", "TimeDistributed and RNN yet') def test_TimeDistributed_trainable(): # test layers that need learning_phase to", "for layer in model.layers[1:]: mask_outputs.append(layer.compute_mask(layer.input, mask_outputs[-1])) func = K.function([model.input], mask_outputs[:-1]) mask_outputs_val = func([model_input])", "= wrappers.Bidirectional(rnn(units))(input2, initial_state=state) model = Model([input1, input2], output) assert len(model.layers) == 4 assert", "reference_output = reference.predict(test_input) assert_allclose(test_output, reference_output, atol=1e-05) # test with Embedding model = Sequential()", "lambda y, y_rev: np.concatenate((y, y_rev), axis=-1) else: merge_func = lambda y, y_rev: [y,", "test_Bidirectional_trainable(): # test layers that need learning_phase to be set x = Input(shape=(3,", "output_dim = 2 dropout_rate = 0.2 for mode in ['sum', 'concat']: x =", "with pytest.raises(ValueError): output = wrappers.Bidirectional(rnn(units))(input2, initial_state=state[0]) # test valid usage: passing a list", "= Sequential() model.add(layers.Dense(3, input_dim=2)) outer_model = Sequential() outer_model.add(wrappers.TimeDistributed(model, input_shape=(3, 2))) outer_model.compile(optimizer='rmsprop', loss='mse') outer_model.fit(np.random.random((10,", "assert len(layer.trainable_weights) == 2 layer.trainable = False assert len(layer.updates) == 0 assert len(layer.trainable_weights)", "* y_rev elif merge_mode == 'ave': merge_func = lambda y, y_rev: (y +" ]
[ "import absolute_import, division, print_function, with_statement import errno import socket from tornado.platform import interface", "that port (\"Address already in # use\") despite that the OS picked it.", "errno import socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS independent", "wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try: while True: result", "count >= 10: # I've never seen it go above 2 a.close() self.writer.close()", "async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering -- pulling the trigger sends", "hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned", "raise # (10048, 'Address already in use') # assert count <= 2 #", "self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self):", "def consume(self): try: while True: result = self.reader.recv(1024) if not result: break except", "self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return", "from __future__ import absolute_import, division, print_function, with_statement import errno import socket from tornado.platform", "we loop until a connect() succeeds (almost always # on the first try).", "never triggered in Tim's tests if count >= 10: # I've never seen", "# assigned (host, port) pair try: self.writer.connect(connect_address) break # success except socket.error as", "efficiency, let the OS pick # a free port for us. # Unfortunately,", "a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a` and try again. Note:", "Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering -- pulling the trigger", "0)) a.listen(1) connect_address = a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address) break", "detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\" is the only error #", "= a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def", "self.writer.connect(connect_address) break # success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or", "platforms that don't have os.pipe() (or where pipes cannot be passed to select()),", "the first try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous", "<gh_stars>100-1000 \"\"\"Lowest-common-denominator implementations of platform functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement", "import errno import socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS", "picked it. This appears # to be a race bug in the Windows", "loop until a connect() succeeds (almost always # on the first try). See", "cannot be passed to select()), but do have sockets. 
This includes Windows and", "self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass", "showed that we may not # be able to connect to that port", "from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For", "# Bind to a local port; for efficiency, let the OS pick #", "seen on two WinXP Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1.", "and we want that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)", "under # Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address already in use')", "use') # assert count <= 2 # never triggered in Tim's tests if", "the trigger sends 1 byte, # and we want that sent immediately, to", "# http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address", "up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1: count += 1", "Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address", "SP2 boxes, under # Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address already", "be passed to select()), but do have sockets. This includes Windows and Jython.", "return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try:", "2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a` and try again.", "pick # a free port for us. # Unfortunately, stress tests showed that", "two WinXP Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1. raise #", "# on the first try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html #", "I originally put a short # sleep() here, but it didn't appear to", "but do have sockets. This includes Windows and Jython. \"\"\" def __init__(self): #", "free port for us. # Unfortunately, stress tests showed that we may not", "+= 1 # Bind to a local port; for efficiency, let the OS", "stress tests showed that we may not # be able to connect to", "1 # Bind to a local port; for efficiency, let the OS pick", "socket.error): pass def consume(self): try: while True: result = self.reader.recv(1024) if not result:", "pass def consume(self): try: while True: result = self.reader.recv(1024) if not result: break", "1 byte, # and we want that sent immediately, to wake up ASAP.", "in the Windows socket implementation. # So we loop until a connect() succeeds", "asynchronous pipe. For use on platforms that don't have os.pipe() (or where pipes", "# sleep() here, but it didn't appear to help or hurt. a.close() self.reader,", "already in use\" is the only error # I've seen on two WinXP", "the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket()", "For use on platforms that don't have os.pipe() (or where pipes cannot be", "connect_address = a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address) break # success", "short # sleep() here, but it didn't appear to help or hurt. a.close()", "I've seen on two WinXP Pro SP2 boxes, under # Pythons 2.3.5 and", "# I've never seen it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind", "the OS pick # a free port for us. 
# Unfortunately, stress tests", "self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\")", "independent asynchronous pipe. For use on platforms that don't have os.pipe() (or where", "interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For use on platforms", "0 while 1: count += 1 # Bind to a local port; for", "fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError,", "use on platforms that don't have os.pipe() (or where pipes cannot be passed", "boxes, under # Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address already in", "# Close `a` and try again. Note: I originally put a short #", "in use') # assert count <= 2 # never triggered in Tim's tests", "OS pick # a free port for us. # Unfortunately, stress tests showed", "details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned (host,", "# (10048, 'Address already in use') # assert count <= 2 # never", "always # on the first try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html", "Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address already in use') # assert", "a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self):", "http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering -- pulling the trigger sends 1", "pulling the trigger sends 1 byte, # and we want that sent immediately,", "write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self):", "= 0 while 1: count += 1 # Bind to a local port;", "but it didn't appear to help or hurt. a.close() self.reader, addr = a.accept()", "for us. # Unfortunately, stress tests showed that we may not # be", "= socket.socket() # Disable buffering -- pulling the trigger sends 1 byte, #", "# \"Address already in use\" is the only error # I've seen on", "This appears # to be a race bug in the Windows socket implementation.", "# So we loop until a connect() succeeds (almost always # on the", "os.pipe() (or where pipes cannot be passed to select()), but do have sockets.", "appears # to be a race bug in the Windows socket implementation. #", "try: self.writer.connect(connect_address) break # success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE')", "to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1: count", "See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a =", "!= errno.WSAEADDRINUSE): # \"Address already in use\" is the only error # I've", "immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1:", "is the only error # I've seen on two WinXP Pro SP2 boxes,", "us. # Unfortunately, stress tests showed that we may not # be able", "= a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address) break # success except", "that the OS picked it. 
This appears # to be a race bug", "# I've seen on two WinXP Pro SP2 boxes, under # Pythons 2.3.5", "go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a` and", "socket.socket() # Disable buffering -- pulling the trigger sends 1 byte, # and", "Windows socket implementation. # So we loop until a connect() succeeds (almost always", "that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0", "in Tim's tests if count >= 10: # I've never seen it go", "a connect() succeeds (almost always # on the first try). See the long", "passed to select()), but do have sockets. This includes Windows and Jython. \"\"\"", "def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable", "didn't appear to help or hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0)", "I've never seen it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\")", "self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return", "thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0))", "able to connect to that port (\"Address already in # use\") despite that", "not # be able to connect to that port (\"Address already in #", "pipe. For use on platforms that don't have os.pipe() (or where pipes cannot", "count += 1 # Bind to a local port; for efficiency, let the", "port) pair try: self.writer.connect(connect_address) break # success except socket.error as detail: if (not", "# be able to connect to that port (\"Address already in # use\")", "assigned (host, port) pair try: self.writer.connect(connect_address) break # success except socket.error as detail:", "(10048, 'Address already in use') # assert count <= 2 # never triggered", "a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address) break # success except socket.error", "to select()), but do have sockets. This includes Windows and Jython. \"\"\" def", "Jython. \"\"\" def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket()", "self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try: while True: result = self.reader.recv(1024)", "`a` and try again. Note: I originally put a short # sleep() here,", "already in # use\") despite that the OS picked it. This appears #", "wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1: count +=", "in # use\") despite that the OS picked it. This appears # to", "assert count <= 2 # never triggered in Tim's tests if count >=", "a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned (host, port)", "sockets. This includes Windows and Jython. \"\"\" def __init__(self): # Based on Zope", "# never triggered in Tim's tests if count >= 10: # I've never", "may not # be able to connect to that port (\"Address already in", "socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe.", "to help or hurt. 
a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd", "# and we want that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY,", "local port; for efficiency, let the OS pick # a free port for", "try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a", "where pipes cannot be passed to select()), but do have sockets. This includes", "trigger!\") # Close `a` and try again. Note: I originally put a short", "def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def", "let the OS pick # a free port for us. # Unfortunately, stress", "tests showed that we may not # be able to connect to that", "sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while", "detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already in", "if count >= 10: # I've never seen it go above 2 a.close()", "pipes cannot be passed to select()), but do have sockets. This includes Windows", "'Address already in use') # assert count <= 2 # never triggered in", "connect() succeeds (almost always # on the first try). See the long thread", "2.4.1. raise # (10048, 'Address already in use') # assert count <= 2", "(IOError, socket.error): pass def consume(self): try: while True: result = self.reader.recv(1024) if not", "to be a race bug in the Windows socket implementation. # So we", "Close `a` and try again. Note: I originally put a short # sleep()", "# to be a race bug in the Windows socket implementation. # So", "select()), but do have sockets. This includes Windows and Jython. \"\"\" def __init__(self):", "or hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno()", "while True: result = self.reader.recv(1024) if not result: break except (IOError, socket.error): pass", "Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For use on platforms that don't", "sends 1 byte, # and we want that sent immediately, to wake up", "<= 2 # never triggered in Tim's tests if count >= 10: #", "import interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For use on", "sleep() here, but it didn't appear to help or hurt. a.close() self.reader, addr", "(not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\" is", "long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket() a.bind((\"127.0.0.1\",", "above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a` and try", "count = 0 while 1: count += 1 # Bind to a local", "implementations of platform functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement import errno", "= self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try:", "# Pythons 2.3.5 and 2.4.1. raise # (10048, 'Address already in use') #", "socket.TCP_NODELAY, 1) count = 0 while 1: count += 1 # Bind to", "Unfortunately, stress tests showed that we may not # be able to connect", "help or hurt. 
a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd =", "(host, port) pair try: self.writer.connect(connect_address) break # success except socket.error as detail: if", "a race bug in the Windows socket implementation. # So we loop until", "OS independent asynchronous pipe. For use on platforms that don't have os.pipe() (or", "try: while True: result = self.reader.recv(1024) if not result: break except (IOError, socket.error):", "Windows and Jython. \"\"\" def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer", "to a local port; for efficiency, let the OS pick # a free", "__future__ import absolute_import, division, print_function, with_statement import errno import socket from tornado.platform import", "an OS independent asynchronous pipe. For use on platforms that don't have os.pipe()", "# Disable buffering -- pulling the trigger sends 1 byte, # and we", "Tim's tests if count >= 10: # I've never seen it go above", "\"\"\" def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() #", "it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a`", "# assert count <= 2 # never triggered in Tim's tests if count", "1: count += 1 # Bind to a local port; for efficiency, let", "includes Windows and Jython. \"\"\" def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py", "only error # I've seen on two WinXP Pro SP2 boxes, under #", "Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering -- pulling", "or detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\" is the only error", "on platforms that don't have os.pipe() (or where pipes cannot be passed to", "raise socket.error(\"Cannot bind trigger!\") # Close `a` and try again. Note: I originally", "(almost always # on the first try). See the long thread at #", "and try again. Note: I originally put a short # sleep() here, but", "a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self):", "-- pulling the trigger sends 1 byte, # and we want that sent", "don't have os.pipe() (or where pipes cannot be passed to select()), but do", "at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1)", "10: # I've never seen it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot", "Note: I originally put a short # sleep() here, but it didn't appear", "buffering -- pulling the trigger sends 1 byte, # and we want that", "# for hideous details. 
a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname()", "Bind to a local port; for efficiency, let the OS pick # a", "if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\"", "self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno()", "functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement import errno import socket from", "absolute_import, division, print_function, with_statement import errno import socket from tornado.platform import interface class", "socket.error(\"Cannot bind trigger!\") # Close `a` and try again. Note: I originally put", "except (IOError, socket.error): pass def consume(self): try: while True: result = self.reader.recv(1024) if", "result = self.reader.recv(1024) if not result: break except (IOError, socket.error): pass def close(self):", "of platform functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement import errno import", "return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error):", "it didn't appear to help or hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0)", "# success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] !=", "that don't have os.pipe() (or where pipes cannot be passed to select()), but", "first try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details.", "(or where pipes cannot be passed to select()), but do have sockets. This", "def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try: while True:", "error # I've seen on two WinXP Pro SP2 boxes, under # Pythons", "addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno()", "we want that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count", "have os.pipe() (or where pipes cannot be passed to select()), but do have", "to connect to that port (\"Address already in # use\") despite that the", "it. This appears # to be a race bug in the Windows socket", "in use\" is the only error # I've seen on two WinXP Pro", "port (\"Address already in # use\") despite that the OS picked it. This", "# a free port for us. # Unfortunately, stress tests showed that we", "race bug in the Windows socket implementation. # So we loop until a", "triggered in Tim's tests if count >= 10: # I've never seen it", "again. 
Note: I originally put a short # sleep() here, but it didn't", "1) count = 0 while 1: count += 1 # Bind to a", "never seen it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") #", "while 1: count += 1 # Bind to a local port; for efficiency,", "socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address", "count <= 2 # never triggered in Tim's tests if count >= 10:", "connect to that port (\"Address already in # use\") despite that the OS", "hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\" is the", "self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close `a` and try again. Note: I", "def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except", "seen it go above 2 a.close() self.writer.close() raise socket.error(\"Cannot bind trigger!\") # Close", "a free port for us. # Unfortunately, stress tests showed that we may", "tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For use", "\"\"\"Create an OS independent asynchronous pipe. For use on platforms that don't have", "on the first try). See the long thread at # http://mail.zope.org/pipermail/zope/2005-July/160433.html # for", "= self.reader.recv(1024) if not result: break except (IOError, socket.error): pass def close(self): self.reader.close()", "on two WinXP Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1. raise", "'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already in use\" is the only", "for efficiency, let the OS pick # a free port for us. #", "want that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count =", ">= 10: # I've never seen it go above 2 a.close() self.writer.close() raise", "on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering -- pulling the", "self.reader.recv(1024) if not result: break except (IOError, socket.error): pass def close(self): self.reader.close() self.writer.close()", "errno.WSAEADDRINUSE): # \"Address already in use\" is the only error # I've seen", "be able to connect to that port (\"Address already in # use\") despite", "break # success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0]", "2 # never triggered in Tim's tests if count >= 10: # I've", "2.3.5 and 2.4.1. raise # (10048, 'Address already in use') # assert count", "already in use') # assert count <= 2 # never triggered in Tim's", "succeeds (almost always # on the first try). See the long thread at", "implementation. # So we loop until a connect() succeeds (almost always # on", "except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): #", "(\"Address already in # use\") despite that the OS picked it. 
This appears", "a local port; for efficiency, let the OS pick # a free port", "__init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering", "platform functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement import errno import socket", "print_function, with_statement import errno import socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create", "use\" is the only error # I've seen on two WinXP Pro SP2", "bind trigger!\") # Close `a` and try again. Note: I originally put a", "trigger sends 1 byte, # and we want that sent immediately, to wake", "pair try: self.writer.connect(connect_address) break # success except socket.error as detail: if (not hasattr(errno,", "try again. Note: I originally put a short # sleep() here, but it", "and Jython. \"\"\" def __init__(self): # Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer =", "port for us. # Unfortunately, stress tests showed that we may not #", "socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned (host, port) pair try:", "bug in the Windows socket implementation. # So we loop until a connect()", "self.writer = socket.socket() # Disable buffering -- pulling the trigger sends 1 byte,", "OS picked it. This appears # to be a race bug in the", "the OS picked it. This appears # to be a race bug in", "byte, # and we want that sent immediately, to wake up ASAP. self.writer.setsockopt(socket.IPPROTO_TCP,", "port; for efficiency, let the OS pick # a free port for us.", "put a short # sleep() here, but it didn't appear to help or", "# Unfortunately, stress tests showed that we may not # be able to", "until a connect() succeeds (almost always # on the first try). See the", "= socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned (host, port) pair", "ASAP. self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1: count += 1 #", "the Windows socket implementation. # So we loop until a connect() succeeds (almost", "\"Address already in use\" is the only error # I've seen on two", "self.writer.fileno() def wake(self): try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try: while", "division, print_function, with_statement import errno import socket from tornado.platform import interface class Waker(interface.Waker):", "the only error # I've seen on two WinXP Pro SP2 boxes, under", "with_statement import errno import socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an", "despite that the OS picked it. This appears # to be a race", "WinXP Pro SP2 boxes, under # Pythons 2.3.5 and 2.4.1. raise # (10048,", "socket implementation. # So we loop until a connect() succeeds (almost always #", "and 2.4.1. raise # (10048, 'Address already in use') # assert count <=", "\"\"\"Lowest-common-denominator implementations of platform functionality.\"\"\" from __future__ import absolute_import, division, print_function, with_statement import", "to that port (\"Address already in # use\") despite that the OS picked", "do have sockets. This includes Windows and Jython. 
\"\"\" def __init__(self): # Based", "try: self.writer.send(b\"x\") except (IOError, socket.error): pass def consume(self): try: while True: result =", "class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous pipe. For use on platforms that", "This includes Windows and Jython. \"\"\" def __init__(self): # Based on Zope async.py:", "hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close() self.reader_fd = self.reader.fileno() def", "a.listen(1) connect_address = a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address) break #", "for hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() #", "import socket from tornado.platform import interface class Waker(interface.Waker): \"\"\"Create an OS independent asynchronous", "be a race bug in the Windows socket implementation. # So we loop", "use\") despite that the OS picked it. This appears # to be a", "Disable buffering -- pulling the trigger sends 1 byte, # and we want", "have sockets. This includes Windows and Jython. \"\"\" def __init__(self): # Based on", "as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE): # \"Address already", "tests if count >= 10: # I've never seen it go above 2", "True: result = self.reader.recv(1024) if not result: break except (IOError, socket.error): pass def", "self.writer.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) count = 0 while 1: count += 1 # Bind", "consume(self): try: while True: result = self.reader.recv(1024) if not result: break except (IOError,", "a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address = a.getsockname() # assigned (host, port) pair try: self.writer.connect(connect_address)", "# Based on Zope async.py: http://svn.zope.org/zc.ngi/trunk/src/zc/ngi/async.py self.writer = socket.socket() # Disable buffering --", "appear to help or hurt. a.close() self.reader, addr = a.accept() self.reader.setblocking(0) self.writer.setblocking(0) a.close()", "a.close() self.reader_fd = self.reader.fileno() def fileno(self): return self.reader.fileno() def write_fileno(self): return self.writer.fileno() def", "So we loop until a connect() succeeds (almost always # on the first", "here, but it didn't appear to help or hurt. a.close() self.reader, addr =", "we may not # be able to connect to that port (\"Address already", "that we may not # be able to connect to that port (\"Address", "success except socket.error as detail: if (not hasattr(errno, 'WSAEADDRINUSE') or detail[0] != errno.WSAEADDRINUSE):", "a short # sleep() here, but it didn't appear to help or hurt.", "# use\") despite that the OS picked it. This appears # to be", "http://mail.zope.org/pipermail/zope/2005-July/160433.html # for hideous details. a = socket.socket() a.bind((\"127.0.0.1\", 0)) a.listen(1) connect_address =", "originally put a short # sleep() here, but it didn't appear to help" ]
[ "options.image_name # Some visual indication that it works, for headless setups (green tape)", "next row by a given latitude, also specified by user 6) sleeps for", "0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180) / (180 -", "options.num_leds i_name = options.image_name # Some visual indication that it works, for headless", "\" + e.args[0]) # flush any incomplete data bt.show() num_errors += 1 if", "0, but this will result in a never-changing LEDs. -i/--image: str Name of", "to bt for pixel in output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel))", "im # Tape resets to stored pattern after a few seconds of inactivity", "# finally, show the image bt.show() # delete variables for memory management del", "except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime", "Image import numpy as np import sys MAX_ERRORS = 3 num_errors = 0", "json.load(f) # Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0 parser =", "Uses the following arguments: -l/--location: tuple Location of the user in tuple(lat, lon).", "np.take(a[latitude_index], indices, axis=0) # rotate the row to center around the specified longitude", "print(\"Encountered runtime error: \" + e.args[0]) # flush any incomplete data bt.show() num_errors", "(ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\",", "import BlinkyTape from time import sleep from PIL import Image import numpy as", "minutes # sleep(10) # Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt,", "Name of the PNG image that contains the color coded pathymetric data. The", "samples a 'number of LEDs' number of pixels from that row 3) shifts", "args) = parser.parse_args() if args: print(\"Unknown parameters: \" + args) # grab the", "following algorithm: 1) uses user-provided location to obtain row of pixel data from", "= min(rows - 1, max(0, (int)(((loc[0] - -90) / (90 - -90)) *", "pole if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index))", "# Can be many different formats. cols, rows = im.size a = np.asarray(im)", "of LEDs' number of pixels from that row 3) shifts the sampled row", "latitude every update rate. 
May be 0, but this will result in a", "latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in", "x in range(n_leds)] # sample that row of pixel data output_pixels = np.take(a[latitude_index],", "center around the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all", "pixel in output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally,", "the values provided by user (or defaults) port = options.portname loc = options.location", "bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try: #", "= min(cols - 1, max(0, (int)(((loc[1] - -180) / (180 - -180)) *", "data to center it at the location specified by user 4) displays resulting", "center it at the location specified by user 4) displays resulting pixels on", "wraps to next pole if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index:", "default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\",", "print(\"Next latitude: \" + str(loc[0])) # grab the applicable pixel indices indices =", "print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab the", "never-changing LEDs. -i/--image: str Name of the PNG image that contains the color", "import sys MAX_ERRORS = 3 num_errors = 0 # Obtain default parameters with", "specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all pixel data to", "in range(n_leds)] # sample that row of pixel data output_pixels = np.take(a[latitude_index], indices,", "different formats. cols, rows = im.size a = np.asarray(im) # of shape (rows,", "(rows - 0) + 0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1] -", "providing command line arguments, you may alternatively edit the defaults in bath_config.json. NOTE:", "that row of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the", "options.location rate = options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name = options.image_name", "of time Uses the following arguments: -l/--location: tuple Location of the user in", "strip. Defaults to (0, 0) -u/--update-interval: int Update interval of the script, in", "-90) / (90 - -90)) * (rows - 0) + 0))) longitude_index =", "during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip", "will modulate the blinky lights using the following algorithm: 1) uses user-provided location", "'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in latitude every update rate.", "print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image", "-180) / (180 - -180)) * (cols - 0) + 0))) # update", "of the next row of elevation data to take loc[0] += delta loc[0]", "the LED strip. Defaults to (0, 0) -u/--update-interval: int Update interval of the", "dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name", "the next row of elevation data to take loc[0] += delta loc[0] =", "100, 0) bt.show() sleep(2) while True: try: # first, load image im =", "LED strip. 
Defaults to (0, 0) -u/--update-interval: int Update interval of the script,", "# grab the values provided by user (or defaults) port = options.portname loc", "image im = Image.open(i_name) # Can be many different formats. cols, rows =", "default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default Blinky Tape", "int Update interval of the script, in minutes. Defaults to 10. -p/--port: str", "optparse import json from blinkytape import BlinkyTape from time import sleep from PIL", "bathy image 2) samples a 'number of LEDs' number of pixels from that", "axis=0) # rotate the row to center around the specified longitude output_pixels =", "shape (rows, cols, channels) # map loc latitude to 0-based index latitude_index =", "data to take loc[0] += delta loc[0] = ((loc[0] + 90) % 180)", "as np import sys MAX_ERRORS = 3 num_errors = 0 # Obtain default", "open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default Blinky Tape port on Raspberry", "indication that it works, for headless setups (green tape) bt = BlinkyTape(port, n_leds)", "3) shifts the sampled row data to center it at the location specified", "you may alternatively edit the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/')", "send all pixel data to bt for pixel in output_pixels: print(\"Sending r: {},", "(ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of the LED", "around the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all pixel", "user (or defaults) port = options.portname loc = options.location rate = options.update_rate delta", "(90 - -90)) * (rows - 0) + 0))) longitude_index = min(cols -", "# send all pixel data to bt for pixel in output_pixels: print(\"Sending r:", "a del im # Tape resets to stored pattern after a few seconds", "edit the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME>", "interval of the script, in minutes. Defaults to 10. -p/--port: str Serial port", "In lieu of providing command line arguments, you may alternatively edit the defaults", "the PNG image that contains the color coded pathymetric data. The file current", "current named mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of", "help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of", "indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that row of pixel", "if args: print(\"Unknown parameters: \" + args) # grab the values provided by", "provided by user (or defaults) port = options.portname loc = options.location rate =", "in latitude every update rate. 
May be 0, but this will result in", "parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of the LED strip (ex: 70,-110)\",", "except RuntimeError as e: print(\"Encountered runtime error: \" + e.args[0]) # flush any", "row 3) shifts the sampled row data to center it at the location", "the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in", "that contains the color coded pathymetric data. The file current named mapserv.png was", "from time import sleep from PIL import Image import numpy as np import", "given latitude, also specified by user 6) sleeps for user-specified period of time", "# Some visual indication that it works, for headless setups (green tape) bt", "Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex:", "loc = options.location rate = options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name", "(int)(((loc[0] - -90) / (90 - -90)) * (rows - 0) + 0)))", "Tape resets to stored pattern after a few seconds of inactivity sleep(rate *", "= Image.open(i_name) # Can be many different formats. cols, rows = im.size a", "to center around the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send", "MAX_ERRORS = 3 num_errors = 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as", "profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update", "(C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json from blinkytape", "minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered", "update the location of the next row of elevation data to take loc[0]", "in output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show", "it works, for headless setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100,", "the sampled row data to center it at the location specified by user", "resulting pixels on Blinky Tape 5) shifts next row by a given latitude,", "rows = im.size a = np.asarray(im) # of shape (rows, cols, channels) #", "5) shifts next row by a given latitude, also specified by user 6)", "but this will result in a never-changing LEDs. 
-i/--image: str Name of the", "that it works, for headless setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0,", "import optparse import json from blinkytape import BlinkyTape from time import sleep from", "5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"])", "(ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\",", "print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime error: \"", "= np.asarray(im) # of shape (rows, cols, channels) # map loc latitude to", "ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime error: \" + e.args[0])", "indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that row of", "sampled row data to center it at the location specified by user 4)", "be many different formats. cols, rows = im.size a = np.asarray(im) # of", "the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample", "/ (90 - -90)) * (rows - 0) + 0))) longitude_index = min(cols", "+ 90) % 180) - 90 # wraps to next pole if overflow", "a few seconds of inactivity sleep(rate * 60) # Wait specified number of", "\"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\",", "-u/--update-interval: int Update interval of the script, in minutes. Defaults to 10. -p/--port:", "for user-specified period of time Uses the following arguments: -l/--location: tuple Location of", "This represents the center of the LED strip. Defaults to (0, 0) -u/--update-interval:", "often to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change", "\"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) =", "% 180) - 90 # wraps to next pole if overflow print(\"Lat index:", "in a never-changing LEDs. -i/--image: str Name of the PNG image that contains", "# wraps to next pole if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon", "coded pathymetric data. The file current named mapserv.png was obtained using the following", "# map loc latitude to 0-based index latitude_index = min(rows - 1, max(0,", "min(cols - 1, max(0, (int)(((loc[1] - -180) / (180 - -180)) * (cols", "bt for pixel in output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel)", "location to obtain row of pixel data from bathy image 2) samples a", "obtain row of pixel data from bathy image 2) samples a 'number of", "row data to center it at the location specified by user 4) displays", "# Wait specified number of minutes # sleep(10) # Wait specified number of", "str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab the applicable pixel indices indices", "values provided by user (or defaults) port = options.portname loc = options.location rate", "seconds of inactivity sleep(rate * 60) # Wait specified number of minutes #", "- 1, max(0, (int)(((loc[1] - -180) / (180 - -180)) * (cols -", "try: # first, load image im = Image.open(i_name) # Can be many different", "rate. 
May be 0, but this will result in a never-changing LEDs. -i/--image:", "number of pixels from that row 3) shifts the sampled row data to", "by user 4) displays resulting pixels on Blinky Tape 5) shifts next row", "resets to stored pattern after a few seconds of inactivity sleep(rate * 60)", "pixels from that row 3) shifts the sampled row data to center it", "e.args[0]) # flush any incomplete data bt.show() num_errors += 1 if num_errors >", "/dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of the LED strip", "parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args)", "+ args) # grab the values provided by user (or defaults) port =", "\" + str(loc[0])) # grab the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for", "/ (180 - -180)) * (cols - 0) + 0))) # update the", "parameters: \" + args) # grab the values provided by user (or defaults)", "to 'COM5'. -d/--delta_latitude: int Vertical change in latitude every update rate. May be", "(ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown parameters: \" +", "cols, channels) # map loc latitude to 0-based index latitude_index = min(rows -", "shifts next row by a given latitude, also specified by user 6) sleeps", "+ str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) #", "Vertical change in latitude every update rate. May be 0, but this will", "(cols - 0) + 0))) # update the location of the next row", "elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during", "obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments,", "= options.num_leds i_name = options.image_name # Some visual indication that it works, for", "= np.take(a[latitude_index], indices, axis=0) # rotate the row to center around the specified", "\" + args) # grab the values provided by user (or defaults) port", "Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\",", "KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime error:", "following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments, you may alternatively", "to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in", "sample that row of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate", "indices, axis=0) # rotate the row to center around the specified longitude output_pixels", "a never-changing LEDs. 
-i/--image: str Name of the PNG image that contains the", "delta = options.delta_latitude n_leds = options.num_leds i_name = options.image_name # Some visual indication", "BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in latitude", "dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args()", "the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all pixel data", "grab the values provided by user (or defaults) port = options.portname loc =", "incomplete data bt.show() num_errors += 1 if num_errors > MAX_ERRORS: sys.exit(\"Error count exceeds", "loc[0] += delta loc[0] = ((loc[0] + 90) % 180) - 90 #", "Tape port on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\",", "0) + 0))) # update the location of the next row of elevation", "data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the row to center around", "str Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude:", "any incomplete data bt.show() num_errors += 1 if num_errors > MAX_ERRORS: sys.exit(\"Error count", "of pixel data from bathy image 2) samples a 'number of LEDs' number", "at the location specified by user 4) displays resulting pixels on Blinky Tape", "rotate the row to center around the specified longitude output_pixels = np.roll(output_pixels, longitude_index,", "= BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try: # first,", "parser.parse_args() if args: print(\"Unknown parameters: \" + args) # grab the values provided", "first, load image im = Image.open(i_name) # Can be many different formats. cols,", "-90)) * (rows - 0) + 0))) longitude_index = min(cols - 1, max(0,", "from PIL import Image import numpy as np import sys MAX_ERRORS = 3", "parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"])", "a given latitude, also specified by user 6) sleeps for user-specified period of", "API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments, you may alternatively edit", "BlinkyTape from time import sleep from PIL import Image import numpy as np", "row of pixel data from bathy image 2) samples a 'number of LEDs'", "pixel indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that row", "pattern after a few seconds of inactivity sleep(rate * 60) # Wait specified", "# rotate the row to center around the specified longitude output_pixels = np.roll(output_pixels,", "(0, 0) -u/--update-interval: int Update interval of the script, in minutes. 
Defaults to", "https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments, you may alternatively edit the", "# of shape (rows, cols, channels) # map loc latitude to 0-based index", "optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location", "+ 0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180) / (180", "user-specified period of time Uses the following arguments: -l/--location: tuple Location of the", "the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments, you may", "defaults) port = options.portname loc = options.location rate = options.update_rate delta = options.delta_latitude", "6) sleeps for user-specified period of time Uses the following arguments: -l/--location: tuple", "map loc latitude to 0-based index latitude_index = min(rows - 1, max(0, (int)(((loc[0]", "\"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\",", "parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of", "# sample that row of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) #", "many different formats. cols, rows = im.size a = np.asarray(im) # of shape", "Image.open(i_name) # Can be many different formats. cols, rows = im.size a =", "output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the", "image 2) samples a 'number of LEDs' number of pixels from that row", "that row 3) shifts the sampled row data to center it at the", "0))) # update the location of the next row of elevation data to", "loc latitude to 0-based index latitude_index = min(rows - 1, max(0, (int)(((loc[0] -", "# update the location of the next row of elevation data to take", "specified by user 4) displays resulting pixels on Blinky Tape 5) shifts next", "config = json.load(f) # Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0", "g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show() # delete", "the user in tuple(lat, lon). This represents the center of the LED strip.", "the image bt.show() # delete variables for memory management del a del im", "\" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0]))", "of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the", "Location of the user in tuple(lat, lon). This represents the center of the", "of the LED strip. 
Defaults to (0, 0) -u/--update-interval: int Update interval of", "runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json", "latitude: \" + str(loc[0])) # grab the applicable pixel indices indices = [(int)(x*(cols/n_leds))", "pixel data to bt for pixel in output_pixels: print(\"Sending r: {}, g: {},", "displays resulting pixels on Blinky Tape 5) shifts next row by a given", "{}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show() # delete variables", "print(\"Unknown parameters: \" + args) # grab the values provided by user (or", "np import sys MAX_ERRORS = 3 num_errors = 0 # Obtain default parameters", "Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except", "(ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile (mins)", "Update interval of the script, in minutes. Defaults to 10. -p/--port: str Serial", "LEDs. -i/--image: str Name of the PNG image that contains the color coded", "every update rate. May be 0, but this will result in a never-changing", "the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update", "0) -u/--update-interval: int Update interval of the script, in minutes. Defaults to 10.", "parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default Blinky Tape port", "LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation", "show the image bt.show() # delete variables for memory management del a del", "by a given latitude, also specified by user 6) sleeps for user-specified period", "range(n_leds)] # sample that row of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0)", "of shape (rows, cols, channels) # map loc latitude to 0-based index latitude_index", "index: \" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab the applicable", "BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try: # first, load", "* (cols - 0) + 0))) # update the location of the next", "RuntimeError as e: print(\"Encountered runtime error: \" + e.args[0]) # flush any incomplete", "file current named mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu", "n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try: # first, load image", "args: print(\"Unknown parameters: \" + args) # grab the values provided by user", "to center it at the location specified by user 4) displays resulting pixels", "also specified by user 6) sleeps for user-specified period of time Uses the", "blinkytape import BlinkyTape from time import sleep from PIL import Image import numpy", "named mapserv.png was obtained using the following API: 
https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing", "r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show()", "data bt.show() num_errors += 1 if num_errors > MAX_ERRORS: sys.exit(\"Error count exceeds that", "(ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\",", "./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown parameters: \" + args)", "longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180) / (180 - -180))", "formats. cols, rows = im.size a = np.asarray(im) # of shape (rows, cols,", "to take loc[0] += delta loc[0] = ((loc[0] + 90) % 180) -", "- 0) + 0))) # update the location of the next row of", "command line arguments, you may alternatively edit the defaults in bath_config.json. NOTE: runs", "Licensed \"\"\" import optparse import json from blinkytape import BlinkyTape from time import", "import Image import numpy as np import sys MAX_ERRORS = 3 num_errors =", "represents the center of the LED strip. Defaults to (0, 0) -u/--update-interval: int", "period of time Uses the following arguments: -l/--location: tuple Location of the user", "may alternatively edit the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C)", "dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center", "latitude to 0-based index latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90)", "- -90)) * (rows - 0) + 0))) longitude_index = min(cols - 1,", "sleep from PIL import Image import numpy as np import sys MAX_ERRORS =", "0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) #", "10. -p/--port: str Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to", "error: \" + e.args[0]) # flush any incomplete data bt.show() num_errors += 1", "<NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json from blinkytape import BlinkyTape", "of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to", "0) bt.show() sleep(2) while True: try: # first, load image im = Image.open(i_name)", "as e: print(\"Encountered runtime error: \" + e.args[0]) # flush any incomplete data", "bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try: # first, load image im", "row to center around the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) #", "user 4) displays resulting pixels on Blinky Tape 5) shifts next row by", "number of minutes # sleep(10) # Wait specified number of minutes except KeyboardInterrupt:", "# Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit()", "flush any incomplete data bt.show() num_errors += 1 if num_errors > MAX_ERRORS: sys.exit(\"Error", "lon). This represents the center of the LED strip. 
Defaults to (0, 0)", "using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line arguments, you", "= np.roll(output_pixels, longitude_index, axis=0) # send all pixel data to bt for pixel", "n_leds = options.num_leds i_name = options.image_name # Some visual indication that it works,", "- 90 # wraps to next pole if overflow print(\"Lat index: \" +", "the blinky lights using the following algorithm: 1) uses user-provided location to obtain", "index latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90) / (90 -", "delta loc[0] = ((loc[0] + 90) % 180) - 90 # wraps to", "- -180) / (180 - -180)) * (cols - 0) + 0))) #", "{}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show() #", "dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number", "help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of", "load image im = Image.open(i_name) # Can be many different formats. cols, rows", "loc[0] = ((loc[0] + 90) % 180) - 90 # wraps to next", "-i/--image: str Name of the PNG image that contains the color coded pathymetric", "following arguments: -l/--location: tuple Location of the user in tuple(lat, lon). This represents", "of inactivity sleep(rate * 60) # Wait specified number of minutes # sleep(10)", "memory management del a del im # Tape resets to stored pattern after", "blinky lights using the following algorithm: 1) uses user-provided location to obtain row", "= im.size a = np.asarray(im) # of shape (rows, cols, channels) # map", "np.asarray(im) # of shape (rows, cols, channels) # map loc latitude to 0-based", "# first, load image im = Image.open(i_name) # Can be many different formats.", "latitude, also specified by user 6) sleeps for user-specified period of time Uses", "e: print(\"Encountered runtime error: \" + e.args[0]) # flush any incomplete data bt.show()", "parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\",", "number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as", "port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical", "setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while", "in tuple(lat, lon). This represents the center of the LED strip. 
Defaults to", "update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude", "import numpy as np import sys MAX_ERRORS = 3 num_errors = 0 #", "grab the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] #", "(int)(((loc[1] - -180) / (180 - -180)) * (cols - 0) + 0)))", "sleep(10) # Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\")", "* (rows - 0) + 0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1]", "latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90) / (90 - -90))", "help=\"How often to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\",", "applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that", "from bathy image 2) samples a 'number of LEDs' number of pixels from", "= 3 num_errors = 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f:", "options.portname loc = options.location rate = options.update_rate delta = options.delta_latitude n_leds = options.num_leds", "in minutes. Defaults to 10. -p/--port: str Serial port of the BlinkyLight (e.g.,", "help=\"Location of the center of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\",", "The file current named mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In", "bt.show() sleep(2) while True: try: # first, load image im = Image.open(i_name) #", "minutes. Defaults to 10. -p/--port: str Serial port of the BlinkyLight (e.g., 'ttyAMA0',", "sleep(rate * 60) # Wait specified number of minutes # sleep(10) # Wait", "= options.portname loc = options.location rate = options.update_rate delta = options.delta_latitude n_leds =", "of pixels from that row 3) shifts the sampled row data to center", "- 0) + 0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180)", "the following arguments: -l/--location: tuple Location of the user in tuple(lat, lon). This", "port on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial", "lieu of providing command line arguments, you may alternatively edit the defaults in", "of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change", "numpy as np import sys MAX_ERRORS = 3 num_errors = 0 # Obtain", "rate = options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name = options.image_name #", "program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime error: \" + e.args[0]) #", "(e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in latitude every", "json from blinkytape import BlinkyTape from time import sleep from PIL import Image", "result in a never-changing LEDs. 
-i/--image: str Name of the PNG image that", "+ str(loc[0])) # grab the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x", "Blinky Tape 5) shifts next row by a given latitude, also specified by", "LEDs' number of pixels from that row 3) shifts the sampled row data", "str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab", "runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse", "the location specified by user 4) displays resulting pixels on Blinky Tape 5)", "take loc[0] += delta loc[0] = ((loc[0] + 90) % 180) - 90", "(https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json from blinkytape import BlinkyTape from", "index: \" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \" +", "int Vertical change in latitude every update rate. May be 0, but this", "= options.delta_latitude n_leds = options.num_leds i_name = options.image_name # Some visual indication that", "as f: config = json.load(f) # Default Blinky Tape port on Raspberry Pi", "Some visual indication that it works, for headless setups (green tape) bt =", "{}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show() # delete variables for memory", "in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image", "defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT", "user-provided location to obtain row of pixel data from bathy image 2) samples", "PNG image that contains the color coded pathymetric data. The file current named", "specified number of minutes # sleep(10) # Wait specified number of minutes except", "the following algorithm: 1) uses user-provided location to obtain row of pixel data", "Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default Blinky", "dest=\"update_rate\", help=\"How often to update elevation profile (mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\",", "tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True: try:", "help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of", "Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'. -d/--delta_latitude: int", "specified by user 6) sleeps for user-specified period of time Uses the following", "port = options.portname loc = options.location rate = options.update_rate delta = options.delta_latitude n_leds", "bt.show() # delete variables for memory management del a del im # Tape", "\"\"\" This script will modulate the blinky lights using the following algorithm: 1)", "in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed", "for pixel in output_pixels: print(\"Sending r: {}, g: {}, b: {}\".format(*pixel)) bt.sendPixel(*pixel) #", "this will result in a never-changing LEDs. 
-i/--image: str Name of the PNG", "i_name = options.image_name # Some visual indication that it works, for headless setups", "help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if", "works, for headless setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0)", "2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json from blinkytape import", "specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError", "data from bathy image 2) samples a 'number of LEDs' number of pixels", "options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name = options.image_name # Some visual", "max(0, (int)(((loc[0] - -90) / (90 - -90)) * (rows - 0) +", "to stored pattern after a few seconds of inactivity sleep(rate * 60) #", "pathymetric data. The file current named mapserv.png was obtained using the following API:", "180) - 90 # wraps to next pole if overflow print(\"Lat index: \"", "by user (or defaults) port = options.portname loc = options.location rate = options.update_rate", "if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next", "to (0, 0) -u/--update-interval: int Update interval of the script, in minutes. Defaults", "the center of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How", "longitude_index, axis=0) # send all pixel data to bt for pixel in output_pixels:", "np.roll(output_pixels, longitude_index, axis=0) # send all pixel data to bt for pixel in", "pixel data from bathy image 2) samples a 'number of LEDs' number of", "for memory management del a del im # Tape resets to stored pattern", "# sleep(10) # Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending", "Wait specified number of minutes # sleep(10) # Wait specified number of minutes", "variables for memory management del a del im # Tape resets to stored", "True: try: # first, load image im = Image.open(i_name) # Can be many", "for x in range(n_leds)] # sample that row of pixel data output_pixels =", "Defaults to 10. -p/--port: str Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3').", "(options, args) = parser.parse_args() if args: print(\"Unknown parameters: \" + args) # grab", "1, max(0, (int)(((loc[0] - -90) / (90 - -90)) * (rows - 0)", "min(rows - 1, max(0, (int)(((loc[0] - -90) / (90 - -90)) * (rows", "# Tape resets to stored pattern after a few seconds of inactivity sleep(rate", "bt.show() num_errors += 1 if num_errors > MAX_ERRORS: sys.exit(\"Error count exceeds that allowed.\")", "data to bt for pixel in output_pixels: print(\"Sending r: {}, g: {}, b:", "will result in a never-changing LEDs. -i/--image: str Name of the PNG image", "time Uses the following arguments: -l/--location: tuple Location of the user in tuple(lat,", "-p/--port: str Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). 
Defaults to 'COM5'.", "= optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\",", "\"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the", "row of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the row", "wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import json from", "dest=\"location\", help=\"Location of the center of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\",", "image bt.show() # delete variables for memory management del a del im #", "\" + str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab the applicable pixel", "- -180)) * (cols - 0) + 0))) # update the location of", "to next pole if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \"", "-180)) * (cols - 0) + 0))) # update the location of the", "Can be many different formats. cols, rows = im.size a = np.asarray(im) #", "((loc[0] + 90) % 180) - 90 # wraps to next pole if", "shifts the sampled row data to center it at the location specified by", "of the PNG image that contains the color coded pathymetric data. The file", "it at the location specified by user 4) displays resulting pixels on Blinky", "be 0, but this will result in a never-changing LEDs. -i/--image: str Name", "Blinky Tape port on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\",", "1) uses user-provided location to obtain row of pixel data from bathy image", "(rows, cols, channels) # map loc latitude to 0-based index latitude_index = min(rows", "a 'number of LEDs' number of pixels from that row 3) shifts the", "del a del im # Tape resets to stored pattern after a few", "the color coded pathymetric data. The file current named mapserv.png was obtained using", "strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex:", "default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown parameters: \" + args) #", "row of elevation data to take loc[0] += delta loc[0] = ((loc[0] +", "update rate. May be 0, but this will result in a never-changing LEDs.", "was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command line", "all pixel data to bt for pixel in output_pixels: print(\"Sending r: {}, g:", "del im # Tape resets to stored pattern after a few seconds of", "runtime error: \" + e.args[0]) # flush any incomplete data bt.show() num_errors +=", "arguments, you may alternatively edit the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py',", "= json.load(f) # Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0 parser", "image that contains the color coded pathymetric data. 
The file current named mapserv.png", "# delete variables for memory management del a del im # Tape resets", "- 1, max(0, (int)(((loc[0] - -90) / (90 - -90)) * (rows -", "NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import", "is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"])", "the script, in minutes. Defaults to 10. -p/--port: str Serial port of the", "num_errors = 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config =", "image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown parameters: \"", "+ e.args[0]) # flush any incomplete data bt.show() num_errors += 1 if num_errors", "MIT Licensed \"\"\" import optparse import json from blinkytape import BlinkyTape from time", "f: config = json.load(f) # Default Blinky Tape port on Raspberry Pi is", "+= delta loc[0] = ((loc[0] + 90) % 180) - 90 # wraps", "70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile (mins) (ex:", "with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default Blinky Tape port on", "modulate the blinky lights using the following algorithm: 1) uses user-provided location to", "by user 6) sleeps for user-specified period of time Uses the following arguments:", "arguments: -l/--location: tuple Location of the user in tuple(lat, lon). This represents the", "of elevation data to take loc[0] += delta loc[0] = ((loc[0] + 90)", "port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of the", "[(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that row of pixel data output_pixels", "\"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\",", "Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\",", "visual indication that it works, for headless setups (green tape) bt = BlinkyTape(port,", "options.delta_latitude n_leds = options.num_leds i_name = options.image_name # Some visual indication that it", "0) + 0))) longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180) /", "management del a del im # Tape resets to stored pattern after a", "on Blinky Tape 5) shifts next row by a given latitude, also specified", "elevation data to take loc[0] += delta loc[0] = ((loc[0] + 90) %", "the row to center around the specified longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0)", "0-based index latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90) / (90", "60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"])", "PIL import Image import numpy as np import sys MAX_ERRORS = 3 num_errors", "channels) # map loc latitude to 0-based index latitude_index = min(rows - 1,", "im.size a = np.asarray(im) # of shape (rows, cols, channels) # map loc", "stored pattern after a few seconds of inactivity sleep(rate * 60) # Wait", "after a few seconds of inactivity 
sleep(rate * 60) # Wait specified number", "= ((loc[0] + 90) % 180) - 90 # wraps to next pole", "of the user in tuple(lat, lon). This represents the center of the LED", "mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0 In lieu of providing command", "next pole if overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \" +", "# flush any incomplete data bt.show() num_errors += 1 if num_errors > MAX_ERRORS:", "few seconds of inactivity sleep(rate * 60) # Wait specified number of minutes", "center of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often", "of pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the row to", "pixel data output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the row to center", "= [(int)(x*(cols/n_leds)) for x in range(n_leds)] # sample that row of pixel data", "from that row 3) shifts the sampled row data to center it at", "Defaults to 'COM5'. -d/--delta_latitude: int Vertical change in latitude every update rate. May", "longitude output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all pixel data to bt", "map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown parameters:", "while True: try: # first, load image im = Image.open(i_name) # Can be", "60) # Wait specified number of minutes # sleep(10) # Wait specified number", "+ str(longitude_index)) print(\"Next latitude: \" + str(loc[0])) # grab the applicable pixel indices", "# Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser()", "cols, rows = im.size a = np.asarray(im) # of shape (rows, cols, channels)", "str Name of the PNG image that contains the color coded pathymetric data.", "on Raspberry Pi is /dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port", "default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options,", "(or defaults) port = options.portname loc = options.location rate = options.update_rate delta =", "output_pixels = np.roll(output_pixels, longitude_index, axis=0) # send all pixel data to bt for", "pixels on Blinky Tape 5) shifts next row by a given latitude, also", "-d/--delta_latitude: int Vertical change in latitude every update rate. May be 0, but", "data. The file current named mapserv.png was obtained using the following API: https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0", "May be 0, but this will result in a never-changing LEDs. 
-i/--image: str", "1, max(0, (int)(((loc[1] - -180) / (180 - -180)) * (cols - 0)", "in latitude during update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs", "location of the next row of elevation data to take loc[0] += delta", "= 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f)", "next row of elevation data to take loc[0] += delta loc[0] = ((loc[0]", "to obtain row of pixel data from bathy image 2) samples a 'number", "alternatively edit the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021", "from blinkytape import BlinkyTape from time import sleep from PIL import Image import", "'COM5'. -d/--delta_latitude: int Vertical change in latitude every update rate. May be 0,", "of providing command line arguments, you may alternatively edit the defaults in bath_config.json.", "time import sleep from PIL import Image import numpy as np import sys", "This script will modulate the blinky lights using the following algorithm: 1) uses", "default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\",", "the center of the LED strip. Defaults to (0, 0) -u/--update-interval: int Update", "2) samples a 'number of LEDs' number of pixels from that row 3)", "to 0-based index latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90) /", "update (ex: 5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex:", "5)\", default=config[\"delta_latitude\"]) parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"])", "\"\"\" import optparse import json from blinkytape import BlinkyTape from time import sleep", "line arguments, you may alternatively edit the defaults in bath_config.json. NOTE: runs via:", "tuple(lat, lon). This represents the center of the LED strip. Defaults to (0,", "headless setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2)", "the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args: print(\"Unknown", "location specified by user 4) displays resulting pixels on Blinky Tape 5) shifts", "of the script, in minutes. Defaults to 10. -p/--port: str Serial port of", "color coded pathymetric data. The file current named mapserv.png was obtained using the", "bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\"", "'ttyAMA0', 'COM3'). Defaults to 'COM5'. 
-d/--delta_latitude: int Vertical change in latitude every update", "(green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show() sleep(2) while True:", "parser.add_option(\"-n\", \"--num-leds\", dest=\"num_leds\", help=\"Number of LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\",", "a = np.asarray(im) # of shape (rows, cols, channels) # map loc latitude", "4) displays resulting pixels on Blinky Tape 5) shifts next row by a", "import sleep from PIL import Image import numpy as np import sys MAX_ERRORS", "LEDs in strip (ex: 60)\", default=config[\"num_leds\"]) parser.add_option(\"-i\", \"--image\", dest=\"image_name\", help=\"Name of the map/bathymetry", "uses user-provided location to obtain row of pixel data from bathy image 2)", "via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev) MIT Licensed \"\"\" import optparse import", "parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\", \"--location\",", "-l/--location: tuple Location of the user in tuple(lat, lon). This represents the center", "= options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name = options.image_name # Some", "- -90) / (90 - -90)) * (rows - 0) + 0))) longitude_index", "* 60) # Wait specified number of minutes # sleep(10) # Wait specified", "90) % 180) - 90 # wraps to next pole if overflow print(\"Lat", "user 6) sleeps for user-specified period of time Uses the following arguments: -l/--location:", "max(0, (int)(((loc[1] - -180) / (180 - -180)) * (cols - 0) +", "to 10. -p/--port: str Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults", "im = Image.open(i_name) # Can be many different formats. 
cols, rows = im.size", "/dev/ttyACM0 parser = optparse.OptionParser() parser.add_option(\"-p\", \"--port\", dest=\"portname\", help=\"serial port (ex: /dev/ttyACM0)\", default=config[\"port\"]) parser.add_option(\"-l\",", "= options.image_name # Some visual indication that it works, for headless setups (green", "\"--location\", dest=\"location\", help=\"Location of the center of the LED strip (ex: 70,-110)\", default=config[\"location\"])", "using the following algorithm: 1) uses user-provided location to obtain row of pixel", "Tape 5) shifts next row by a given latitude, also specified by user", "'number of LEDs' number of pixels from that row 3) shifts the sampled", "algorithm: 1) uses user-provided location to obtain row of pixel data from bathy", "bt.sendPixel(*pixel) # finally, show the image bt.show() # delete variables for memory management", "(mins) (ex: 5)\", default=config[\"update_rate\"]) parser.add_option(\"-d\", \"--delta-latitude\", dest=\"delta_latitude\", help=\"Change in latitude during update (ex:", "(180 - -180)) * (cols - 0) + 0))) # update the location", "delete variables for memory management del a del im # Tape resets to", "of minutes except KeyboardInterrupt: print(\"Keyboard interrupt, ending program.\") sys.exit() except RuntimeError as e:", "# Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config = json.load(f) # Default", "of the map/bathymetry image (ex: ./mapserv.png)\", default=config[\"image\"]) (options, args) = parser.parse_args() if args:", "+ 0))) # update the location of the next row of elevation data", "sys.exit() except RuntimeError as e: print(\"Encountered runtime error: \" + e.args[0]) # flush", "strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile", "center of the LED strip. Defaults to (0, 0) -u/--update-interval: int Update interval", "b: {}\".format(*pixel)) bt.sendPixel(*pixel) # finally, show the image bt.show() # delete variables for", "output_pixels = np.take(a[latitude_index], indices, axis=0) # rotate the row to center around the", "default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\", help=\"How often to update elevation profile (mins) (ex: 5)\",", "contains the color coded pathymetric data. The file current named mapserv.png was obtained", "str(loc[0])) # grab the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x in", "Defaults to (0, 0) -u/--update-interval: int Update interval of the script, in minutes.", "script will modulate the blinky lights using the following algorithm: 1) uses user-provided", "inactivity sleep(rate * 60) # Wait specified number of minutes # sleep(10) #", "sleeps for user-specified period of time Uses the following arguments: -l/--location: tuple Location", "row by a given latitude, also specified by user 6) sleeps for user-specified", "import json from blinkytape import BlinkyTape from time import sleep from PIL import", "overflow print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude:", "user in tuple(lat, lon). This represents the center of the LED strip. 
Defaults", "finally, show the image bt.show() # delete variables for memory management del a", "# grab the applicable pixel indices indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)]", "= parser.parse_args() if args: print(\"Unknown parameters: \" + args) # grab the values", "of minutes # sleep(10) # Wait specified number of minutes except KeyboardInterrupt: print(\"Keyboard", "change in latitude every update rate. May be 0, but this will result", "args) # grab the values provided by user (or defaults) port = options.portname", "print(\"Lat index: \" + str(latitude_index)) print(\"Lon index: \" + str(longitude_index)) print(\"Next latitude: \"", "sleep(2) while True: try: # first, load image im = Image.open(i_name) # Can", "sys MAX_ERRORS = 3 num_errors = 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\")", "= options.location rate = options.update_rate delta = options.delta_latitude n_leds = options.num_leds i_name =", "the location of the next row of elevation data to take loc[0] +=", "tuple Location of the user in tuple(lat, lon). This represents the center of", "script, in minutes. Defaults to 10. -p/--port: str Serial port of the BlinkyLight", "the defaults in bath_config.json. NOTE: runs via: runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/') (C) 2021 <NAME> (https://joeycodes.dev)", "default=config[\"port\"]) parser.add_option(\"-l\", \"--location\", dest=\"location\", help=\"Location of the center of the LED strip (ex:", "90 # wraps to next pole if overflow print(\"Lat index: \" + str(latitude_index))", "axis=0) # send all pixel data to bt for pixel in output_pixels: print(\"Sending", "of the center of the LED strip (ex: 70,-110)\", default=config[\"location\"]) parser.add_option(\"-u\", \"--update-rate\", dest=\"update_rate\",", "3 num_errors = 0 # Obtain default parameters with open(\"./bathymetry_blink/bathy_config.json\") as f: config", "interrupt, ending program.\") sys.exit() except RuntimeError as e: print(\"Encountered runtime error: \" +", "for headless setups (green tape) bt = BlinkyTape(port, n_leds) bt.displayColor(0, 100, 0) bt.show()", "lights using the following algorithm: 1) uses user-provided location to obtain row of" ]
[ "in data: # flatten list values if isinstance(data[key], list): if len(data[key]) > 0:", "FieldMaps.map_key_value(field_key, data[key]) # manually add Fire Rating and proposed Fire Rating if field_key", "fields \"\"\" output = {} if data: data = self.set_pts_fields(data) for key in", "csv elif isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|', '') # relabel field,", "in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key", "number elif phone_appnum_key is not None: if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key])", "Transform module \"\"\" #pylint: disable=too-few-public-methods import pandas as pd from .transform import TransformBase", "= self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key", "format data fields \"\"\" output = {} if data: data = self.set_pts_fields(data) for", "for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else:", "output)) output = [i for i in output if i is not None]", "# format phone numbers and building application number elif phone_appnum_key is not None:", "dataframe def to_csv(self, dataframe, sep=','): \"\"\" Return CSV from DataFrame \"\"\" return dataframe.to_csv(index=False,", "Rating and proposed Fire Rating if field_key == 'construction_type' and data[key] != '':", "'': output = self.add_fire_rating(key, data[key], output) # format phone numbers and building application", "sep): \"\"\" transform submissions from export \"\"\" output = list(map(self.get_data, data)) output =", "data)) output = list(map(self.pretty_format, output)) output = [i for i in output if", "submission['data']['permitType'] != 'existingPermitApplication': output = {} data = submission['data'] output['id'] = submission['_id'] output['created']", "not None: output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually add Fire Rating and proposed", "and building application number elif phone_appnum_key is not None: if phone_appnum_key == 'phone_fields':", "!= 'existingPermitApplication': output = {} data = submission['data'] output['id'] = submission['_id'] output['created'] =", "address fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output) else: multi_selects = []", "and data[key] != '': output = self.add_fire_rating(key, data[key], output) # format phone numbers", "is not None: if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters", "\"\"\" Return CSV from DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data):", "elif phone_appnum_key is not None: if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) #", "characters that break the csv elif isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|',", "\"\"\" Transform for Export Submissions \"\"\" def transform(self, data, sep): \"\"\" transform submissions", "for key in data: # flatten list values if isinstance(data[key], list): if len(data[key])", "# if storage, concat filename if 'storage' in val and 'originalName' in val:", "if isinstance(data[key], list): if len(data[key]) > 0: if isinstance(data[key][0], (int, str)): 
output[key] =", "Export Submissions Transform module \"\"\" #pylint: disable=too-few-public-methods import pandas as pd from .transform", "export \"\"\" output = list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output = [i", "= {} if data: data = self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]):", "= self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key", "import TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\"", "if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output = {} data = submission['data'] output['id']", "multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else: output[key] =", "phone numbers and building application number elif phone_appnum_key is not None: if phone_appnum_key", "output[key+str(index+1)] = val if len(file_names) > 0: output[key] = ', '.join(file_names) # flatten", "..resources.field_configs import FieldConfigs from ..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export", "the csv elif isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|', '') # relabel", "return output # pylint: disable=R0201 def get_data(self, submission): \"\"\" Get data from submission", "return output def normalize(self, data): \"\"\" Normalize data into a flat structure into", "relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field] = output.pop(key) output", "i in output if i is not None] output = self.normalize(output) output =", "pretty_format(self, data): \"\"\" Pretty format data fields \"\"\" output = {} if data:", "data, sep): \"\"\" transform submissions from export \"\"\" output = list(map(self.get_data, data)) output", "output) # format phone numbers and building application number elif phone_appnum_key is not", "output = self.normalize(output) output = self.to_csv(output, sep) return output # pylint: disable=R0201 def", "output = list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output = [i for i", "'storage' in val and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if", "multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else: output[key] = data[key] return output def", "phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that break the csv", "is not None] output = self.normalize(output) output = self.to_csv(output, sep) return output #", "= ', '.join(map(str, data[key])) else: file_names = [] for index, val in enumerate(data[key]):", "val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names) > 0: output[key] = ',", "data: data = self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key])", "# relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field] = output.pop(key)", "dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self, dataframe, sep=','): \"\"\" 
Return CSV from DataFrame", "data = submission['data'] output['id'] = submission['_id'] output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key", "isinstance(data[key], dict): # building use code needs manual process if FieldConfigs.is_building_use(key): output[key] =", "= submission['created'] #pylint: disable=too-many-nested-blocks for key in data: # flatten list values if", "= self.normalize(output) output = self.to_csv(output, sep) return output # pylint: disable=R0201 def get_data(self,", "import FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export Submissions \"\"\" def transform(self, data,", "fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output) else: multi_selects = [] for", "file_names = [] for index, val in enumerate(data[key]): # if storage, concat filename", "\"\"\" # skip permit type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and", "submission object \"\"\" # skip permit type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if", "FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output) else: multi_selects = [] for multi_key, multi_value", "[] for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects)", "Rating if field_key == 'construction_type' and data[key] != '': output = self.add_fire_rating(key, data[key],", "'pretty') if field_key is not None: output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually add", "# skip permit type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType']", "self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key is", "> 0: if isinstance(data[key][0], (int, str)): output[key] = ', '.join(map(str, data[key])) else: file_names", "# flatten nested address fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output) else:", "output if i is not None] output = self.normalize(output) output = self.to_csv(output, sep)", "= list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output = [i for i in", "[i for i in output if i is not None] output = self.normalize(output)", "to_csv(self, dataframe, sep=','): \"\"\" Return CSV from DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n')", "field_key is not None: output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually add Fire Rating", "\"\"\" #pylint: disable=too-few-public-methods import pandas as pd from .transform import TransformBase from ..resources.field_configs", "pandas as pd from .transform import TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps", "list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output = [i for i in output", "0: output[key] = ', '.join(file_names) # flatten multi select values elif isinstance(data[key], dict):", "return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data): \"\"\" Pretty format data fields \"\"\"", "{} data = submission['data'] output['id'] = submission['_id'] 
output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for", "data[key] != '': output = self.add_fire_rating(key, data[key], output) # format phone numbers and", "= pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self, dataframe,", "pylint: disable=R0201 def get_data(self, submission): \"\"\" Get data from submission object \"\"\" #", "normalize(self, data): \"\"\" Normalize data into a flat structure into DataFrame \"\"\" dataframe", "proposed Fire Rating if field_key == 'construction_type' and data[key] != '': output =", "FieldConfigs from ..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export Submissions \"\"\"", "concat filename if 'storage' in val and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)]", "if multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else: output[key] = data[key] return output", "FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export Submissions \"\"\" def transform(self, data, sep):", "data[key].replace('\\n', '\\t').replace('|', '') # relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field:", "submission['created'] #pylint: disable=too-many-nested-blocks for key in data: # flatten list values if isinstance(data[key],", "data) # flatten nested address fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output)", "data from submission object \"\"\" # skip permit type = existingPermitApplication submissions #pylint:", "data into a flat structure into DataFrame \"\"\" dataframe = pd.json_normalize(data) # update", "if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key,", "dict): # building use code needs manual process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key,", "return dataframe def to_csv(self, dataframe, sep=','): \"\"\" Return CSV from DataFrame \"\"\" return", "None: output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually add Fire Rating and proposed Fire", "'') # relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field] =", "and proposed Fire Rating if field_key == 'construction_type' and data[key] != '': output", "bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|', '') # relabel field, if necessary relabel_field =", "select values elif isinstance(data[key], dict): # building use code needs manual process if", "i is not None] output = self.normalize(output) output = self.to_csv(output, sep) return output", "ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export Submissions \"\"\" def transform(self, data, sep): \"\"\" transform", "enumerate(data[key]): # if storage, concat filename if 'storage' in val and 'originalName' in", "FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key is not None: output[key] =", "def to_csv(self, dataframe, sep=','): \"\"\" Return CSV from DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep,", "field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if 
field_key is not None:", "disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output = {} data = submission['data']", "add Fire Rating and proposed Fire Rating if field_key == 'construction_type' and data[key]", "if storage, concat filename if 'storage' in val and 'originalName' in val: file_names.append(val['originalName'])", "not None: if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that", "#pylint: disable=too-few-public-methods import pandas as pd from .transform import TransformBase from ..resources.field_configs import", "data fields \"\"\" output = {} if data: data = self.set_pts_fields(data) for key", "(int, str)): output[key] = ', '.join(map(str, data[key])) else: file_names = [] for index,", "output[key] = data[key].replace('\\n', '\\t').replace('|', '') # relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key)", "Fire Rating if field_key == 'construction_type' and data[key] != '': output = self.add_fire_rating(key,", "Pretty format data fields \"\"\" output = {} if data: data = self.set_pts_fields(data)", "if data: data = self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]): output[key] =", "output = self.to_csv(output, sep) return output # pylint: disable=R0201 def get_data(self, submission): \"\"\"", "'construction_type' and data[key] != '': output = self.add_fire_rating(key, data[key], output) # format phone", "output = list(map(self.pretty_format, output)) output = [i for i in output if i", "relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field] = output.pop(key) output = self.reorder_fields(output) return output", "not None] output = self.normalize(output) output = self.to_csv(output, sep) return output # pylint:", "= submission['data'] output['id'] = submission['_id'] output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key in", "object \"\"\" # skip permit type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType']", "needs manual process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data) # flatten nested", "\"\"\" output = list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output = [i for", "Submissions \"\"\" def transform(self, data, sep): \"\"\" transform submissions from export \"\"\" output", ".transform import TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase):", "output) else: multi_selects = [] for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key)", "submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output = {} data", "<filename>service/transforms/export_submissions.py \"\"\" Export Submissions Transform module \"\"\" #pylint: disable=too-few-public-methods import pandas as pd", "output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually add Fire Rating and proposed Fire Rating", "pd from .transform import TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps import FieldMaps", "from ..resources.field_configs import FieldConfigs from ..resources.field_maps import 
FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for", "submission['data'] output['id'] = submission['_id'] output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key in data:", "Export Submissions \"\"\" def transform(self, data, sep): \"\"\" transform submissions from export \"\"\"", "output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key in data: # flatten list values", "if field_key is not None: output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually add Fire", "'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that break the csv elif isinstance(data[key],", "\"\"\" Export Submissions Transform module \"\"\" #pylint: disable=too-few-public-methods import pandas as pd from", "format phone numbers and building application number elif phone_appnum_key is not None: if", "import pandas as pd from .transform import TransformBase from ..resources.field_configs import FieldConfigs from", "= existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output =", "key in data: # flatten list values if isinstance(data[key], list): if len(data[key]) >", "from DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data): \"\"\" Pretty format", "Normalize data into a flat structure into DataFrame \"\"\" dataframe = pd.json_normalize(data) #", "..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export Submissions \"\"\" def transform(self,", "DataFrame \"\"\" dataframe = pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe", "[] for index, val in enumerate(data[key]): # if storage, concat filename if 'storage'", "update column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self, dataframe, sep=','): \"\"\" Return", "permit type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication':", "output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that break the csv elif isinstance(data[key], (str,", "multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else: output[key] = data[key] return output def normalize(self,", "submission): \"\"\" Get data from submission object \"\"\" # skip permit type =", "output = self.add_fire_rating(key, data[key], output) # format phone numbers and building application number", "# pylint: disable=R0201 def get_data(self, submission): \"\"\" Get data from submission object \"\"\"", "self.add_fire_rating(key, data[key], output) # format phone numbers and building application number elif phone_appnum_key", "from .transform import TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps import FieldMaps class", "= ', '.join(file_names) # flatten multi select values elif isinstance(data[key], dict): # building", "', '.join(file_names) # flatten multi select values elif isinstance(data[key], dict): # building use", "file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names) > 0: output[key] = ', '.join(file_names)", "manually add Fire Rating and proposed Fire 
Rating if field_key == 'construction_type' and", "Submissions Transform module \"\"\" #pylint: disable=too-few-public-methods import pandas as pd from .transform import", "> 0: output[key] = ', '.join(file_names) # flatten multi select values elif isinstance(data[key],", "data[key], data) # flatten nested address fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key],", "in val and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names)", "'\\t').replace('|', '') # relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field]", "storage, concat filename if 'storage' in val and 'originalName' in val: file_names.append(val['originalName']) else:", "dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data): \"\"\" Pretty format data fields \"\"\" output", "structure into DataFrame \"\"\" dataframe = pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string, inplace=True)", "if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data) # flatten nested address fields elif", "= ', '.join(multi_selects) else: output[key] = data[key] return output def normalize(self, data): \"\"\"", "for i in output if i is not None] output = self.normalize(output) output", "submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output = {} data = submission['data'] output['id'] =", "= {} data = submission['data'] output['id'] = submission['_id'] output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks", "values elif isinstance(data[key], dict): # building use code needs manual process if FieldConfigs.is_building_use(key):", "DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data): \"\"\" Pretty format data", "def normalize(self, data): \"\"\" Normalize data into a flat structure into DataFrame \"\"\"", "= list(map(self.pretty_format, output)) output = [i for i in output if i is", "self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key =", "flatten nested address fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output) else: multi_selects", "transform(self, data, sep): \"\"\" transform submissions from export \"\"\" output = list(map(self.get_data, data))", "Fire Rating and proposed Fire Rating if field_key == 'construction_type' and data[key] !=", "else: multi_selects = [] for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key]", "if len(file_names) > 0: output[key] = ', '.join(file_names) # flatten multi select values", "multi select values elif isinstance(data[key], dict): # building use code needs manual process", "elif isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|', '') # relabel field, if", "', '.join(multi_selects) else: output[key] = data[key] return output def normalize(self, data): \"\"\" Normalize", "len(file_names) > 0: output[key] = ', '.join(file_names) # flatten multi select values elif", "values if isinstance(data[key], list): if len(data[key]) > 0: if isinstance(data[key][0], (int, str)): output[key]", "sep=','): \"\"\" Return CSV from DataFrame \"\"\" 
return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self,", "\"\"\" def transform(self, data, sep): \"\"\" transform submissions from export \"\"\" output =", "= submission['_id'] output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key in data: # flatten", "CSV from DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data): \"\"\" Pretty", "= data[key] return output def normalize(self, data): \"\"\" Normalize data into a flat", "= self.convert_building_use(key, data[key], data) # flatten nested address fields elif FieldConfigs.is_nested_address_field(key): output =", "# flatten list values if isinstance(data[key], list): if len(data[key]) > 0: if isinstance(data[key][0],", "= val if len(file_names) > 0: output[key] = ', '.join(file_names) # flatten multi", "output = {} data = submission['data'] output['id'] = submission['_id'] output['created'] = submission['created'] #pylint:", "type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output", "phone_appnum_key is not None: if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse", "== 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that break the csv elif", "output[key] = ', '.join(multi_selects) else: output[key] = data[key] return output def normalize(self, data):", "elif isinstance(data[key], dict): # building use code needs manual process if FieldConfigs.is_building_use(key): output[key]", "filename if 'storage' in val and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)] =", "!= '': output = self.add_fire_rating(key, data[key], output) # format phone numbers and building", "transform submissions from export \"\"\" output = list(map(self.get_data, data)) output = list(map(self.pretty_format, output))", "val and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names) >", "if i is not None] output = self.normalize(output) output = self.to_csv(output, sep) return", "submission['_id'] output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key in data: # flatten list", "elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output) else: multi_selects = [] for multi_key,", "data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else: output[key] = data[key] return", "phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key is not None: output[key] = FieldMaps.map_key_value(field_key, data[key])", "self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty')", "disable=R0201 def get_data(self, submission): \"\"\" Get data from submission object \"\"\" # skip", "nested address fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key, data[key], output) else: multi_selects =", "column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self, dataframe, sep=','): \"\"\" Return CSV", "necessary relabel_field = FieldConfigs.get_relabel_fields(key) if 
relabel_field: output[relabel_field] = output.pop(key) output = self.reorder_fields(output) return", "break the csv elif isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|', '') #", "#pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output = {} data =", "is not None: output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually add Fire Rating and", "= FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key is not None: output[key]", "= [i for i in output if i is not None] output =", "if isinstance(data[key][0], (int, str)): output[key] = ', '.join(map(str, data[key])) else: file_names = []", "list): if len(data[key]) > 0: if isinstance(data[key][0], (int, str)): output[key] = ', '.join(map(str,", "data = self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else:", "= FieldMaps.map_key_value(field_key, data[key]) # manually add Fire Rating and proposed Fire Rating if", "else: output[key] = data[key] return output def normalize(self, data): \"\"\" Normalize data into", "cleanse characters that break the csv elif isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\\n',", "numbers and building application number elif phone_appnum_key is not None: if phone_appnum_key ==", "len(data[key]) > 0: if isinstance(data[key][0], (int, str)): output[key] = ', '.join(map(str, data[key])) else:", "into DataFrame \"\"\" dataframe = pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string, inplace=True) return", "a flat structure into DataFrame \"\"\" dataframe = pd.json_normalize(data) # update column names", "isinstance(data[key][0], (int, str)): output[key] = ', '.join(map(str, data[key])) else: file_names = [] for", "output = self.convert_address_fields(key, data[key], output) else: multi_selects = [] for multi_key, multi_value in", "class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export Submissions \"\"\" def transform(self, data, sep): \"\"\"", "if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that break the", "', '.join(map(str, data[key])) else: file_names = [] for index, val in enumerate(data[key]): #", "# update column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self, dataframe, sep=','): \"\"\"", "in enumerate(data[key]): # if storage, concat filename if 'storage' in val and 'originalName'", "and submission['data']['permitType'] != 'existingPermitApplication': output = {} data = submission['data'] output['id'] = submission['_id']", "# building use code needs manual process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key],", "else: file_names = [] for index, val in enumerate(data[key]): # if storage, concat", "multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else: output[key]", "in output if i is not None] output = self.normalize(output) output = self.to_csv(output,", "str)): output[key] = ', '.join(map(str, data[key])) else: file_names = [] for index, val", "= self.to_csv(output, sep) return output # pylint: disable=R0201 def get_data(self, submission): \"\"\" Get", "= [] for index, val in enumerate(data[key]): # if storage, 
concat filename if", "import FieldConfigs from ..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export Submissions", "multi_selects = [] for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] =", "for Export Submissions \"\"\" def transform(self, data, sep): \"\"\" transform submissions from export", "application number elif phone_appnum_key is not None: if phone_appnum_key == 'phone_fields': output[key] =", "'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key is not None: output[key] = FieldMaps.map_key_value(field_key,", "# cleanse characters that break the csv elif isinstance(data[key], (str, bytes)): output[key] =", "process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data) # flatten nested address fields", "data[key] return output def normalize(self, data): \"\"\" Normalize data into a flat structure", "isinstance(data[key], list): if len(data[key]) > 0: if isinstance(data[key][0], (int, str)): output[key] = ',", "output[key] = ', '.join(map(str, data[key])) else: file_names = [] for index, val in", "#pylint: disable=too-many-nested-blocks for key in data: # flatten list values if isinstance(data[key], list):", "self.convert_address_fields(key, data[key], output) else: multi_selects = [] for multi_key, multi_value in data[key].items(): if", "else: output[key+str(index+1)] = val if len(file_names) > 0: output[key] = ', '.join(file_names) #", "building application number elif phone_appnum_key is not None: if phone_appnum_key == 'phone_fields': output[key]", "data): \"\"\" Pretty format data fields \"\"\" output = {} if data: data", "= self.add_fire_rating(key, data[key], output) # format phone numbers and building application number elif", "= self.pretty_phonenumber(data[key]) # cleanse characters that break the csv elif isinstance(data[key], (str, bytes)):", "None] output = self.normalize(output) output = self.to_csv(output, sep) return output # pylint: disable=R0201", "from submission object \"\"\" # skip permit type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks", "if len(data[key]) > 0: if isinstance(data[key][0], (int, str)): output[key] = ', '.join(map(str, data[key]))", "self.convert_building_use(key, data[key], data) # flatten nested address fields elif FieldConfigs.is_nested_address_field(key): output = self.convert_address_fields(key,", "== 'construction_type' and data[key] != '': output = self.add_fire_rating(key, data[key], output) # format", "list(map(self.pretty_format, output)) output = [i for i in output if i is not", "output = [i for i in output if i is not None] output", "flatten multi select values elif isinstance(data[key], dict): # building use code needs manual", "'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names) > 0: output[key]", "output def normalize(self, data): \"\"\" Normalize data into a flat structure into DataFrame", "self.to_csv(output, sep) return output # pylint: disable=R0201 def get_data(self, submission): \"\"\" Get data", "output = {} if data: data = self.set_pts_fields(data) for key in data: if", "'.join(file_names) # flatten multi select values elif isinstance(data[key], dict): # building use code", "def transform(self, data, sep): \"\"\" transform submissions from export \"\"\" output = list(map(self.get_data,", "\"\"\" output = {} if data: data = 
self.set_pts_fields(data) for key in data:", "'.join(map(str, data[key])) else: file_names = [] for index, val in enumerate(data[key]): # if", "flat structure into DataFrame \"\"\" dataframe = pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string,", "# manually add Fire Rating and proposed Fire Rating if field_key == 'construction_type'", "'.join(multi_selects) else: output[key] = data[key] return output def normalize(self, data): \"\"\" Normalize data", "names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self, dataframe, sep=','): \"\"\" Return CSV from", "data[key], output) else: multi_selects = [] for multi_key, multi_value in data[key].items(): if multi_value:", "for key in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key,", "\"\"\" Get data from submission object \"\"\" # skip permit type = existingPermitApplication", "\"\"\" dataframe = pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def", "output[key] = data[key] return output def normalize(self, data): \"\"\" Normalize data into a", "= [] for multi_key, multi_value in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ',", "code needs manual process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data) # flatten", "into a flat structure into DataFrame \"\"\" dataframe = pd.json_normalize(data) # update column", "get_data(self, submission): \"\"\" Get data from submission object \"\"\" # skip permit type", "def get_data(self, submission): \"\"\" Get data from submission object \"\"\" # skip permit", "use code needs manual process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data) #", "in data[key].items(): if multi_value: multi_selects.append(multi_key) output[key] = ', '.join(multi_selects) else: output[key] = data[key]", "if 'storage' in val and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val", "field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field] = output.pop(key) output =", "isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|', '') # relabel field, if necessary", "and 'originalName' in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names) > 0:", "Transform for Export Submissions \"\"\" def transform(self, data, sep): \"\"\" transform submissions from", "existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] != 'existingPermitApplication': output = {}", "if field_key == 'construction_type' and data[key] != '': output = self.add_fire_rating(key, data[key], output)", "output[key] = ', '.join(file_names) # flatten multi select values elif isinstance(data[key], dict): #", "data[key])) else: file_names = [] for index, val in enumerate(data[key]): # if storage,", "val if len(file_names) > 0: output[key] = ', '.join(file_names) # flatten multi select", "= data[key].replace('\\n', '\\t').replace('|', '') # relabel field, if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if", "TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps import 
FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform", "pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self, dataframe, sep=','):", "disable=too-few-public-methods import pandas as pd from .transform import TransformBase from ..resources.field_configs import FieldConfigs", "skip permit type = existingPermitApplication submissions #pylint: disable=too-many-nested-blocks if submission['data']['permitType'] and submission['data']['permitType'] !=", "module \"\"\" #pylint: disable=too-few-public-methods import pandas as pd from .transform import TransformBase from", "field_key == 'construction_type' and data[key] != '': output = self.add_fire_rating(key, data[key], output) #", "val in enumerate(data[key]): # if storage, concat filename if 'storage' in val and", "building use code needs manual process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data)", "list values if isinstance(data[key], list): if len(data[key]) > 0: if isinstance(data[key][0], (int, str)):", "output[key] = self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if", "dataframe = pd.json_normalize(data) # update column names dataframe.rename(columns=self.pretty_string, inplace=True) return dataframe def to_csv(self,", "= FieldConfigs.get_field_key(key, 'pretty') if field_key is not None: output[key] = FieldMaps.map_key_value(field_key, data[key]) #", "output['id'] = submission['_id'] output['created'] = submission['created'] #pylint: disable=too-many-nested-blocks for key in data: #", "line_terminator='\\r\\n') def pretty_format(self, data): \"\"\" Pretty format data fields \"\"\" output = {}", "data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key =", "{} if data: data = self.set_pts_fields(data) for key in data: if self.datetime_valid(data[key]): output[key]", "data[key]) # manually add Fire Rating and proposed Fire Rating if field_key ==", "in val: file_names.append(val['originalName']) else: output[key+str(index+1)] = val if len(file_names) > 0: output[key] =", "inplace=True) return dataframe def to_csv(self, dataframe, sep=','): \"\"\" Return CSV from DataFrame \"\"\"", "if necessary relabel_field = FieldConfigs.get_relabel_fields(key) if relabel_field: output[relabel_field] = output.pop(key) output = self.reorder_fields(output)", "\"\"\" Pretty format data fields \"\"\" output = {} if data: data =", "as pd from .transform import TransformBase from ..resources.field_configs import FieldConfigs from ..resources.field_maps import", "submissions from export \"\"\" output = list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output", "self.pretty_phonenumber(data[key]) # cleanse characters that break the csv elif isinstance(data[key], (str, bytes)): output[key]", "for index, val in enumerate(data[key]): # if storage, concat filename if 'storage' in", "def pretty_format(self, data): \"\"\" Pretty format data fields \"\"\" output = {} if", "\"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data): \"\"\" Pretty format data fields", "sep) return output # pylint: disable=R0201 def get_data(self, submission): \"\"\" Get data from", "0: if isinstance(data[key][0], (int, str)): output[key] = ', '.join(map(str, 
data[key])) else: file_names =", "from export \"\"\" output = list(map(self.get_data, data)) output = list(map(self.pretty_format, output)) output =", "\"\"\" Normalize data into a flat structure into DataFrame \"\"\" dataframe = pd.json_normalize(data)", "that break the csv elif isinstance(data[key], (str, bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|', '')", "flatten list values if isinstance(data[key], list): if len(data[key]) > 0: if isinstance(data[key][0], (int,", "output # pylint: disable=R0201 def get_data(self, submission): \"\"\" Get data from submission object", "Return CSV from DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def pretty_format(self, data): \"\"\"", "key in data: if self.datetime_valid(data[key]): output[key] = self.pretty_time(data[key]) else: field_key = FieldConfigs.get_field_key(key, 'map')", "= self.convert_address_fields(key, data[key], output) else: multi_selects = [] for multi_key, multi_value in data[key].items():", "data[key], output) # format phone numbers and building application number elif phone_appnum_key is", "self.normalize(output) output = self.to_csv(output, sep) return output # pylint: disable=R0201 def get_data(self, submission):", "Get data from submission object \"\"\" # skip permit type = existingPermitApplication submissions", "dataframe, sep=','): \"\"\" Return CSV from DataFrame \"\"\" return dataframe.to_csv(index=False, sep=sep, line_terminator='\\r\\n') def", "(str, bytes)): output[key] = data[key].replace('\\n', '\\t').replace('|', '') # relabel field, if necessary relabel_field", "'existingPermitApplication': output = {} data = submission['data'] output['id'] = submission['_id'] output['created'] = submission['created']", "data): \"\"\" Normalize data into a flat structure into DataFrame \"\"\" dataframe =", "output[key] = self.convert_building_use(key, data[key], data) # flatten nested address fields elif FieldConfigs.is_nested_address_field(key): output", "disable=too-many-nested-blocks for key in data: # flatten list values if isinstance(data[key], list): if", "manual process if FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data) # flatten nested address", "FieldConfigs.is_building_use(key): output[key] = self.convert_building_use(key, data[key], data) # flatten nested address fields elif FieldConfigs.is_nested_address_field(key):", "else: field_key = FieldConfigs.get_field_key(key, 'map') phone_appnum_key = FieldConfigs.get_field_key(key, 'pretty') if field_key is not", "FieldConfigs.get_field_key(key, 'pretty') if field_key is not None: output[key] = FieldMaps.map_key_value(field_key, data[key]) # manually", "# flatten multi select values elif isinstance(data[key], dict): # building use code needs", "sep=sep, line_terminator='\\r\\n') def pretty_format(self, data): \"\"\" Pretty format data fields \"\"\" output =", "index, val in enumerate(data[key]): # if storage, concat filename if 'storage' in val", "data: # flatten list values if isinstance(data[key], list): if len(data[key]) > 0: if", "None: if phone_appnum_key == 'phone_fields': output[key] = self.pretty_phonenumber(data[key]) # cleanse characters that break", "from ..resources.field_maps import FieldMaps class ExportSubmissionsTransform(TransformBase): \"\"\" Transform for Export Submissions \"\"\" def", "\"\"\" transform submissions from export \"\"\" output = list(map(self.get_data, data)) output = list(map(self.pretty_format," ]
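The normalize step above leans on pandas' json_normalize to flatten nested submission records into flat DataFrame columns before the CSV export. As a minimal, self-contained sketch of that behaviour (the record shape and field names here are made up for illustration and are not this service's real schema):

import pandas as pd

# Hypothetical records with a nested dict, roughly the shape a submission takes
# after get_data/pretty_format have run.
records = [
    {"id": "1", "applicant": {"firstName": "Ada", "lastName": "Lovelace"}, "permitType": "newConstruction"},
    {"id": "2", "applicant": {"firstName": "Alan", "lastName": "Turing"}, "permitType": "alteration"},
]

# json_normalize turns nested keys into dotted column names, e.g. "applicant.firstName".
dataframe = pd.json_normalize(records)
print(dataframe.to_csv(index=False, sep=","))

Renaming the dotted columns afterwards (as the transform does with dataframe.rename) is what produces the human-readable headers in the exported CSV.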
<reponame>mgelbart/ray<gh_stars>10-100
import pytest
import torch

import ray
from ray.ml.predictors.integrations.torch import TorchPredictor
from ray.ml.train.integrations.torch import TorchTrainer
from ray import train
from ray.ml.examples.pytorch.torch_linear_example import train_func as linear_train_func


@pytest.fixture
def ray_start_4_cpus():
    address_info = ray.init(num_cpus=4)
    yield address_info
    # The code after the yield will run as teardown code.
    ray.shutdown()


@pytest.mark.parametrize("num_workers", [1, 2])
def test_torch_linear(ray_start_4_cpus, num_workers):
    def train_func(config):
        result = linear_train_func(config)
        assert len(result) == epochs
        assert result[-1]["loss"] < result[0]["loss"]

    num_workers = num_workers
    epochs = 3
    scaling_config = {"num_workers": num_workers}
    config = {"lr": 1e-2, "hidden_size": 1, "batch_size": 4, "epochs": epochs}
    trainer = TorchTrainer(
        train_loop_per_worker=train_func,
        train_loop_config=config,
        scaling_config=scaling_config,
    )
    trainer.fit()


def test_torch_e2e(ray_start_4_cpus):
    def train_func():
        model = torch.nn.Linear(1, 1)
        train.save_checkpoint(model=model)

    scaling_config = {"num_workers": 2}
    trainer = TorchTrainer(
        train_loop_per_worker=train_func, scaling_config=scaling_config
    )
    result = trainer.fit()

    predict_dataset = ray.data.range(3)

    class TorchScorer:
        def __init__(self):
            self.pred = TorchPredictor.from_checkpoint(result.checkpoint)

        def __call__(self, x):
            return self.pred.predict(x, dtype=torch.float)

    predictions = predict_dataset.map_batches(
        TorchScorer, batch_format="pandas", compute="actors"
    )
    assert predictions.count() == 3


def test_torch_e2e_state_dict(ray_start_4_cpus):
    def train_func():
        model = torch.nn.Linear(1, 1).state_dict()
        train.save_checkpoint(model=model)

    scaling_config = {"num_workers": 2}
    trainer = TorchTrainer(
        train_loop_per_worker=train_func, scaling_config=scaling_config
    )
    result = trainer.fit()

    # If loading from a state dict, a model definition must be passed in.
    with pytest.raises(ValueError):
        TorchPredictor.from_checkpoint(result.checkpoint)

    class TorchScorer:
        def __init__(self):
            self.pred = TorchPredictor.from_checkpoint(
                result.checkpoint, model=torch.nn.Linear(1, 1)
            )

        def __call__(self, x):
            return self.pred.predict(x, dtype=torch.float)

    predict_dataset = ray.data.range(3)
    predictions = predict_dataset.map_batches(
        TorchScorer, batch_format="pandas", compute="actors"
    )
    assert predictions.count() == 3


if __name__ == "__main__":
    import pytest
    import sys

    sys.exit(pytest.main(["-v", "-x", __file__]))
[ "IxNetwork model allows for multiple method Signatures with the same name while python", "value) @property def LearnSendMacOnly(self): # type: () -> bool \"\"\" Returns ------- -", "the property is accessed. \"\"\" __slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP =", "bool \"\"\" Returns ------- - bool: Sends learning frames to MAC address only.", "Specifies the rate at which IxNetwork sends learn frames to the DUT. \"\"\"", "None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None,", "(number): Specifies the number of learn frames that IxNetwork sends through fast path.", "in ms that IxNetwork pauses before sending all the Raises ------ - ServerError:", "LearnFrames \"\"\"Updates learnFrames resource on the server. Args ---- - FastPathEnable (bool): If", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: ()", "IxNetwork sends through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): #", "the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to", "fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int) ->", "\"\"\" __slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize':", "() -> int \"\"\" Returns ------- - number: Specifies the size of the", "(*Any, **Any) -> None \"\"\"Executes the applyITWizardConfiguration operation on the server. Applies the", "from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type:", "# of this software and associated documentation files (the \"Software\"), # to deal", "= item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any,", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR", "*args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the applyITWizardConfiguration operation on", "before sending all the learning frames from all the ports. - LearnWaitTimeBeforeTransmit (number):", "@property def FastPathLearnFrameSize(self): # type: () -> int \"\"\" Returns ------- - number:", "resource which will be retrieved from the server every time the property is", "the Connection class will block until the operation is complete. - Returns str:", "which IxNetwork sends learn frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def", "server. Stops the currently running Quick Test. stop(async_operation=bool) -------------------------- - async_operation (bool=False): True", "# type: () -> bool \"\"\" Returns ------- - bool: Sends learning frames", "path transmit. - FastPathLearnFrameSize (number): Specifies the size of the learning frames in", "the server. 
applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously.", "------- - str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial): Allows to", "frames to the DUT. - LearnSendMacOnly (bool): Sends learning frames to MAC address", "before sending all the learning frames from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime'])", "-> Union[List[str], None] \"\"\"Executes the waitForTest operation on the server. Waits for the", "'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent,", "and waits for its execution to finish. The IxNetwork model allows for multiple", "self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes", "stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "type: (*Any, **Any) -> None \"\"\"Executes the start operation on the server. Starts", "FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type:", "# type: () -> bool \"\"\" Returns ------- - bool: If true, enables", "(bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type: () -> int", "and this permission notice shall be included in # all copies or substantial", "the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to execute the", "The server has encountered an uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def", "does not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "def LearnFrameSize(self): # type: () -> int \"\"\" Returns ------- - number: Specifies", "length of time in ms that IxNetwork pauses before sending all the \"\"\"", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): # type:", "Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any", "from uhd_restpy.base import Base from uhd_restpy.files import Files from typing import List, Any,", "update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None):", "every time the property is accessed. \"\"\" __slots__ = () _SDM_NAME = 'learnFrames'", "FastPathLearnFrameSize(self): # type: () -> int \"\"\" Returns ------- - number: Specifies the", "is synchronous and returns the result of the test. 
run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- -", "Returns list(str): This method is synchronous and returns the result of the test.", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args,", "all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length of time in ms", "sends through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type:", "return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value)", "LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int,", "of learning frames that IxNetwork sends for each address. - LearnRate (number): Specifies", "value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type:", "applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "@LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def", "# and/or sell copies of the Software, and to permit persons to whom", "FastPathEnable (bool): If true, enables fast path transmit. - FastPathLearnFrameSize (number): Specifies the", "LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)): Allows to choose", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args,", "test. - LearnNumFrames (number): Specifies the number of learning frames that IxNetwork sends", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "# Software is furnished to do so, subject to the following conditions: #", "self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "(*Any, **Any) -> None \"\"\"Executes the stop operation on the server. Stops the", "- number: Specifies the number of learning frames that IxNetwork sends for each", "self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\" Returns -------", "execution of the specified Quick Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation", "encapsulates a required learnFrames resource which will be retrieved from the server every", "is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments of the", "**Any) -> None \"\"\"Executes the applyITWizardConfiguration operation on the server. 
Applies the specified", "-> Union[bool, None] \"\"\"Executes the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ -", "self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type: (*Any, **Any) -> None", "() -> bool \"\"\" Returns ------- - bool: Sends learning frames to MAC", "fast path. - LearnFrameSize (number): Specifies the size of the learning frames. -", "'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate':", "frames that IxNetwork sends for each address. - LearnRate (number): Specifies the rate", "encountered an uncategorized error condition \"\"\" payload = { \"Arg1\": self.href } for", "IxNetwork sends learning frames during the test. - LearnNumFrames (number): Specifies the number", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "\"\"\" Returns ------- - str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial):", "only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool) -> None", "THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import Base", "\"\"\" Returns ------- - number: Specifies the number of learn frames that IxNetwork", "payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] =", "uncategorized error condition \"\"\" payload = { \"Arg1\": self.href } for i in", "of time in ms that IxNetwork pauses before sending all the Raises ------", "Specifies the rate at which IxNetwork sends learn frames to the DUT. -", "**Any) -> None \"\"\"Executes the apply operation on the server. Applies the specified", "response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes", "use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the", "NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "size of the learning frames in the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter", "(*Any, **Any) -> Union[List[str], None] \"\"\"Executes the run operation on the server. Starts", "python does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute the operation", "transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) -> None", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'],", "int, int, int, int, str, int, int, bool, bool, int, int) -> LearnFrames", "Test and waits for its execution to finish. The IxNetwork model allows for", "Specifies the size of the learning frames. 
- LearnFrequency (str(never | onBinaryIteration |", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args,", "self).__init__(parent, list_op) @property def FastPathEnable(self): # type: () -> bool \"\"\" Returns -------", "rate at which IxNetwork sends learn frames to the DUT. - LearnSendMacOnly (bool):", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type: ()", "'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest',", "# type: () -> int \"\"\" Returns ------- - number: Specifies the size", "self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property", "Returns bool: Raises ------ - NotFoundError: The requested resource does not exist on", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: () ->", "an uncategorized error condition \"\"\" payload = { \"Arg1\": self.href } for i", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync',", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "IxNetwork sends learn frames through fast path. - LearnFrameSize (number): Specifies the size", "allows for multiple method Signatures with the same name while python does not.", "learning frames during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): #", "type: () -> str \"\"\" Returns ------- - str(never | onBinaryIteration | oncePerFramesize", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop',", "self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "until the operation is complete. - Returns bool: Raises ------ - NotFoundError: The", "Applies the specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute", "applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type: () ->", "documentation files (the \"Software\"), # to deal in the Software without restriction, including", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): # type: () -> bool", "int) -> LearnFrames \"\"\"Updates learnFrames resource on the server. Args ---- - FastPathEnable", "def Stop(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the stop", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "sends through fast path. 
- FastPathRate (number): Specifies the rate at which IxNetwork", "type: (*Any, **Any) -> Union[str, None] \"\"\"Executes the generateReport operation on the server.", "for each address. - LearnRate (number): Specifies the rate at which IxNetwork sends", "**Any) -> Union[List[str], None] \"\"\"Executes the run operation on the server. Starts the", "self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property", "rest api calls made through the Connection class will block until the operation", "to any person obtaining a copy # of this software and associated documentation", "IxNetwork pauses before sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value):", "= 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate',", "ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the applyITWizardConfiguration operation", "of the learning frames. - LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest", "| oncePerFramesize | oncePerTest | onTrial)): Allows to choose how frequently IxNetwork sends", "payload=payload, response_object=None) def Run(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None]", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "the result of the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input", "------- - bool: If true, enables fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter", "type: (*Any, **Any) -> None \"\"\"Executes the applyITWizardConfiguration operation on the server. Applies", "restriction, including without limitation # the rights to use, copy, modify, merge, publish,", "the size of the learning frames in the fast path. - FastPathNumFrames (number):", "\"\"\"Executes the start operation on the server. Starts the specified Quick Test. The", "the server. Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True", "type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the run operation on the server.", "() -> bool \"\"\" Returns ------- - bool: If true, enables fast path", "- LearnWaitTimeBeforeTransmit (number): Specifies the length of time in ms that IxNetwork pauses", "that IxNetwork sends through fast path. - FastPathRate (number): Specifies the rate at", "router solicitation messages. - LearnWaitTime (number): Specifies the length of time in ms", "the Software, and to permit persons to whom the # Software is furnished", "def LearnFrequency(self): # type: () -> str \"\"\" Returns ------- - str(never |", "*args, **kwargs): # type: (*Any, **Any) -> Union[str, None] \"\"\"Executes the generateReport operation", "until the operation is complete. - Returns list(str): This method is synchronous and", "until the operation is complete. 
- Returns str: This method is asynchronous and", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type: ()", "class will block until the operation is complete. Raises ------ - NotFoundError: The", "value) @property def LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\" Returns ------- -", "**kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the apply operation on the", "complete. Raises ------ - NotFoundError: The requested resource does not exist on the", "server every time the property is accessed. \"\"\" __slots__ = () _SDM_NAME =", "Union[str, None] \"\"\"Executes the generateReport operation on the server. Generate a PDF report", "waitForTest operation on the server. Waits for the execution of the specified Quick", "(str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)): Allows to choose how", "until the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input", "() -> int \"\"\" Returns ------- - number: Specifies the number of learn", "granted, free of charge, to any person obtaining a copy # of this", "server. Waits for the execution of the specified Quick Test to be completed.", "locals())) def Apply(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the", "self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property", "int \"\"\" Returns ------- - number: Specifies the rate at which IxNetwork sends", "IxNetwork sends during the test. The LearnFrames class encapsulates a required learnFrames resource", "learning frames that IxNetwork sends during the test. The LearnFrames class encapsulates a", "# Permission is hereby granted, free of charge, to any person obtaining a", "payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type:", "*args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the stop operation on", "None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: () -> bool \"\"\" Returns", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "server has encountered an uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self,", "(bool): Sends learning frames to MAC address only. - LearnSendRouterSolicitation (bool): Sends router", "| onTrial): Allows to choose how frequently IxNetwork sends learning frames during the", "on the server. Starts the specified Quick Test. The IxNetwork model allows for", "return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): # type: (*Any, **Any) ->", "operation on the server. Applies the specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation", "the server. Starts the specified Quick Test. The IxNetwork model allows for multiple", "specified Quick Test. 
applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to execute the operation", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): # type:", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'],", "the rate at which IxNetwork sends learn frames to the DUT. - LearnSendMacOnly", "learning frames from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value):", "be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "ServerError: The server has encountered an uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))", "including without limitation # the rights to use, copy, modify, merge, publish, distribute,", "= item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any,", "is furnished to do so, subject to the following conditions: # # The", "fast path transmit. - FastPathLearnFrameSize (number): Specifies the size of the learning frames", "- LearnFrameSize (number): Specifies the size of the learning frames. - LearnFrequency (str(never", "the DUT. - LearnSendMacOnly (bool): Sends learning frames to MAC address only. -", "-------------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest", "at which IxNetwork sends learn frames to the DUT. - LearnSendMacOnly (bool): Sends", "def LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self):", "None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type: () -> int \"\"\" Returns", "InputParameters (str): The input arguments of the test. - async_operation (bool=False): True to", "FastPathRate(self): # type: () -> int \"\"\" Returns ------- - number: Specifies the", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload,", "server. Starts the specified Quick Test. The IxNetwork model allows for multiple method", "# type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult operation on the", "the operation is complete. - Returns bool: Raises ------ - NotFoundError: The requested", "type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: () ->", "path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int) -> None", "is accessed. \"\"\" __slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable':", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None)", "the server. 
Starts the specified Quick Test and waits for its execution to", "------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest", "self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property", "obtaining a copy # of this software and associated documentation files (the \"Software\"),", "will block until the operation is complete. - Returns str: This method is", "def LearnNumFrames(self): # type: () -> int \"\"\" Returns ------- - number: Specifies", "item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any)", "does not exist on the server - ServerError: The server has encountered an", "resource does not exist on the server - ServerError: The server has encountered", "test. The LearnFrames class encapsulates a required learnFrames resource which will be retrieved", "length of time in ms that IxNetwork pauses before sending all the Raises", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs):", "------- - number: Specifies the rate at which IxNetwork sends learn frames to", "on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to execute the operation", "frequently IxNetwork sends learning frames during the test. - LearnNumFrames (number): Specifies the", "payload=payload, response_object=None) def Start(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes", "Returns ------- - number: Specifies the length of time in ms that IxNetwork", "Union class LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends during the test. The", "will block until the operation is complete. - Returns list(str): This method is", "i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in", "the fast path. - FastPathNumFrames (number): Specifies the number of learn frames that", "LearnSendRouterSolicitation(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): #", "(*Any, **Any) -> None \"\"\"Executes the start operation on the server. Starts the", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self,", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'],", "fast path. - FastPathNumFrames (number): Specifies the number of learn frames that IxNetwork", "block until the operation is complete. Raises ------ - NotFoundError: The requested resource", "any person obtaining a copy # of this software and associated documentation files", "made through the Connection class will block until the operation is complete. -", "pauses before sending all the learning frames from all the ports. \"\"\" return", "frames to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type:", "operation on the server. Applies the specified Quick Test. 
applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation", "def ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the applyAsync", "# type: () -> str \"\"\" Returns ------- - str(never | onBinaryIteration |", "Returns ------- - bool: Sends learning frames to MAC address only. \"\"\" return", "The input arguments of the test. - async_operation (bool=False): True to execute the", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "whom the # Software is furnished to do so, subject to the following", "operation is complete. Raises ------ - NotFoundError: The requested resource does not exist", "def FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self):", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "the specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the", "address only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool) ->", "charge, to any person obtaining a copy # of this software and associated", "its execution to finish. The IxNetwork model allows for multiple method Signatures with", "{ 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames,", "(str): The input arguments of the test. - async_operation (bool=False): True to execute", "# type: (*Any, **Any) -> Union[str, None] \"\"\"Executes the generateReport operation on the", "Returns ------- - bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def", "frames to MAC address only. - LearnSendRouterSolicitation (bool): Sends router solicitation messages. -", "in ms that IxNetwork pauses before sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter", "the server. Waits for the execution of the specified Quick Test to be", "server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any", "same name while python does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to", "\"\"\" Returns ------- - number: Specifies the size of the learning frames in", "**Any) -> Union[str, None] \"\"\"Executes the generateReport operation on the server. Generate a", "true, enables fast path transmit. - FastPathLearnFrameSize (number): Specifies the size of the", "DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import Base from uhd_restpy.files import Files", "result of the test. 
run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type: ()", "of this software and associated documentation files (the \"Software\"), # to deal in", "\"\"\" Returns ------- - bool: Sends learning frames to MAC address only. \"\"\"", "- LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)): Allows to", "of learn frames that IxNetwork sends through fast path. - FastPathRate (number): Specifies", "| oncePerTest | onTrial): Allows to choose how frequently IxNetwork sends learning frames", "(bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: () -> bool", "time in ms that IxNetwork pauses before sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'])", "time in ms that IxNetwork pauses before sending all the Raises ------ -", "-> Union[List[str], None] \"\"\"Executes the run operation on the server. Starts the specified", "LearnWaitTimeBeforeTransmit (number): Specifies the length of time in ms that IxNetwork pauses before", "-> int \"\"\" Returns ------- - number: Specifies the size of the learning", "LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): #", "learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int) ->", "class encapsulates a required learnFrames resource which will be retrieved from the server", "is complete. - Returns bool: Raises ------ - NotFoundError: The requested resource does", "'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime',", "bool \"\"\" Returns ------- - bool: If true, enables fast path transmit. \"\"\"", "'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP =", "through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type: (int)", "return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value)", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs):", "bool, bool, int, int) -> LearnFrames \"\"\"Updates learnFrames resource on the server. Args", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "Generate a PDF report for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ -", "learning frames to MAC address only. 
- LearnSendRouterSolicitation (bool): Sends router solicitation messages.", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "that IxNetwork sends during the test. The LearnFrames class encapsulates a required learnFrames", "item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any, **Any)", "last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to execute the", "@property def LearnNumFrames(self): # type: () -> int \"\"\" Returns ------- - number:", "learning frames that IxNetwork sends for each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE", "IxNetwork sends for each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): #", "arguments of the test. - async_operation (bool=False): True to execute the operation asynchronously.", "all the learning frames from all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the", "= { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False):", "all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int)", "and associated documentation files (the \"Software\"), # to deal in the Software without", "------- - number: Specifies the rate at which IxNetwork sends learn frames through", "Start(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the start operation", "_SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent,", "# # Permission is hereby granted, free of charge, to any person obtaining", "in the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type:", "report for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True", "the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to execute", "solicitation messages. - LearnWaitTime (number): Specifies the length of time in ms that", "return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any) ->", "pauses before sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): #", "# type: () -> bool \"\"\" Returns ------- - bool: Sends router solicitation", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "the specified Quick Test and waits for its execution to finish. 
The IxNetwork", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type: () -> int \"\"\" Returns", "LearnFrames class encapsulates a required learnFrames resource which will be retrieved from the", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int)", "required learnFrames resource which will be retrieved from the server every time the", "def LearnSendRouterSolicitation(self): # type: () -> bool \"\"\" Returns ------- - bool: Sends", "the Connection class will block until the operation is complete. Raises ------ -", "of the learning frames in the fast path. - FastPathNumFrames (number): Specifies the", "value) @property def FastPathNumFrames(self): # type: () -> int \"\"\" Returns ------- -", "through fast path. - LearnFrameSize (number): Specifies the size of the learning frames.", "to the DUT. - LearnSendMacOnly (bool): Sends learning frames to MAC address only.", "is complete. Raises ------ - NotFoundError: The requested resource does not exist on", "return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value)", "fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type: (int) ->", "None \"\"\"Executes the start operation on the server. Starts the specified Quick Test.", "list(str): This method is synchronous and returns the result of the test. Raises", "def LearnWaitTime(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self):", "before sending all the Raises ------ - ServerError: The server has encountered an", "waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): # type: () -> bool \"\"\" Returns", "None] \"\"\"Executes the generateReport operation on the server. Generate a PDF report for", "returns the result of the test. Raises ------ - NotFoundError: The requested resource", "typing import List, Any, Union class LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends", "Quick Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to execute", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type:", "= () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames':", "python does not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation", "---- - FastPathEnable (bool): If true, enables fast path transmit. - FastPathLearnFrameSize (number):", "size of the learning frames. 
- LearnFrequency (str(never | onBinaryIteration | oncePerFramesize |", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None)", "\"\"\"Executes the run operation on the server. Starts the specified Quick Test and", "FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): #", "Run(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the run", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: ()", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "sends learn frames to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value):", "the specified Quick Test. The IxNetwork model allows for multiple method Signatures with", "(number): Specifies the length of time in ms that IxNetwork pauses before sending", "SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base", "sends for each address. - LearnRate (number): Specifies the rate at which IxNetwork", "**kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the applyAsync operation on the", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: () -> int \"\"\"", "subject to the following conditions: # # The above copyright notice and this", "through the Connection class will block until the operation is complete. Raises ------", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload,", "def FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self):", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type: (int) -> None", "return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value)", "None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: () -> int \"\"\" Returns", "for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item", "------- - bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self,", "operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments of", "error condition \"\"\" payload = { \"Arg1\": self.href } for i in range(len(args)):", "_SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize',", "Starts the specified Quick Test. 
The IxNetwork model allows for multiple method Signatures", "LearnSendRouterSolicitation (bool): Sends router solicitation messages. - LearnWaitTime (number): Specifies the length of", "true, enables fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): #", "Starts the specified Quick Test and waits for its execution to finish. The", "LearnWaitTime(self): # type: () -> int \"\"\" Returns ------- - number: Specifies the", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "sending all the learning frames from all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies", "condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type: (*Any, **Any)", "response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the", "frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int) -> None", "is hereby granted, free of charge, to any person obtaining a copy #", "time in ms that IxNetwork pauses before sending all the learning frames from", "the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length of time in ms that", "same name while python does not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True to", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "- InputParameters (str): The input arguments of the test. - async_operation (bool=False): True", "the rate at which IxNetwork sends learn frames to the DUT. \"\"\" return", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run',", "the number of learning frames that IxNetwork sends for each address. \"\"\" return", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type:", "will block until the operation is complete. Raises ------ - NotFoundError: The requested", "which will be retrieved from the server every time the property is accessed.", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self,", "If true, enables fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value):", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: () -> int \"\"\"", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type: () -> int", "- FastPathLearnFrameSize (number): Specifies the size of the learning frames in the fast", "operation asynchronously. Any subsequent rest api calls made through the Connection class will", "kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): #", "so, subject to the following conditions: # # The above copyright notice and", "import List, Any, Union class LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends during", "Stops the currently running Quick Test. 
stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to", "type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type: () ->", "self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type: () -> int \"\"\" Returns -------", "Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to execute the", "-------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest", "# type: (*Any, **Any) -> None \"\"\"Executes the start operation on the server.", "of time in ms that IxNetwork pauses before sending all the \"\"\" return", "distribute, sublicense, # and/or sell copies of the Software, and to permit persons", "**kwargs): # type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult operation on", "return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type: (*Any, **Any) -> None", "@property def LearnSendMacOnly(self): # type: () -> bool \"\"\" Returns ------- - bool:", "self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: () -> bool \"\"\" Returns -------", "calls made through the Connection class will block until the operation is complete.", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type:", "------ - ServerError: The server has encountered an uncategorized error condition \"\"\" return", "LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int, int, str, int, int, bool, bool,", "--------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest", "self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: () -> bool \"\"\" Returns -------", "LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): #", "IxNetwork sends through fast path. - FastPathRate (number): Specifies the rate at which", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args,", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'],", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args,", "value) @property def LearnSendRouterSolicitation(self): # type: () -> bool \"\"\" Returns ------- -", "(bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made", "the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None", "during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str)", "made through the Connection class will block until the operation is complete. 
start(InputParameters=string,", "LearnRate(self): # type: () -> int \"\"\" Returns ------- - number: Specifies the", "in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items():", "- LearnNumFrames (number): Specifies the number of learning frames that IxNetwork sends for", "\"\"\"Executes the applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True", "bool: Sends learning frames to MAC address only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def", "to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type: (int)", "number of learn frames that IxNetwork sends through fast path. - FastPathRate (number):", "Sends learning frames to MAC address only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self,", "None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: () -> int \"\"\" Returns", "software and associated documentation files (the \"Software\"), # to deal in the Software", "def Apply(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the apply", "this permission notice shall be included in # all copies or substantial portions", "kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): #", "| oncePerTest | onTrial)): Allows to choose how frequently IxNetwork sends learning frames", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None,", "sends learn frames through fast path. - LearnFrameSize (number): Specifies the size of", "Returns ------- - str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial): Allows", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport',", "FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): #", "self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: () -> int \"\"\" Returns -------", "the number of learn frames that IxNetwork sends through fast path. - FastPathRate", "Union[List[str], None] \"\"\"Executes the run operation on the server. Starts the specified Quick", "by IXIA Keysight # # Permission is hereby granted, free of charge, to", "merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to", "Quick Test. The IxNetwork model allows for multiple method Signatures with the same", "for the execution of the specified Quick Test to be completed. waitForTest(async_operation=bool)list -------------------------------------", "**Any) -> Union[List[str], None] \"\"\"Executes the waitForTest operation on the server. 
Waits for", "def ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the applyITWizardConfiguration", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "publish, distribute, sublicense, # and/or sell copies of the Software, and to permit", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'],", "*args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the apply operation on", "payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type:", "LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int, int, str, int,", "the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str) ->", "succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to execute the operation", "def LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self):", "List, Any, Union class LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends during the", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None)", "start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments of the test. -", "that IxNetwork pauses before sending all the Raises ------ - ServerError: The server", "item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any, **Any)", "LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int, int,", "the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments", "frames from all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length of time", "None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: () -> int \"\"\" Returns", "the # Software is furnished to do so, subject to the following conditions:", "finish. The IxNetwork model allows for multiple method Signatures with the same name", "asynchronous and has no return value. Raises ------ - NotFoundError: The requested resource", "int, int, str, int, int, bool, bool, int, int) -> LearnFrames \"\"\"Updates learnFrames", "the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int)", "the operation asynchronously. Any subsequent rest api calls made through the Connection class", "the currently running Quick Test. stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute", "} for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for", "enables fast path transmit. 
- FastPathLearnFrameSize (number): Specifies the size of the learning", "(bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: () -> int", "() -> str \"\"\" Returns ------- - str(never | onBinaryIteration | oncePerFramesize |", "during the test. - LearnNumFrames (number): Specifies the number of learning frames that", "the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type: (int) ->", "waits for its execution to finish. The IxNetwork model allows for multiple method", "Test. stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any", "learn frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): #", "the server - ServerError: The server has encountered an uncategorized error condition \"\"\"", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def Start(self,", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): # type: ()", "range(len(args)): payload['Arg%s' % (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]]", "LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int, int, str, int, int,", "resource on the server. Args ---- - FastPathEnable (bool): If true, enables fast", "complete. - Returns bool: Raises ------ - NotFoundError: The requested resource does not", "- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE.", "frames that IxNetwork sends through fast path. - FastPathRate (number): Specifies the rate", "This method is synchronous and returns the result of the test. run(InputParameters=string, async_operation=bool)list", "The server has encountered an uncategorized error condition \"\"\" payload = { \"Arg1\":", "not exist on the server - ServerError: The server has encountered an uncategorized", "-> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type: () -> int \"\"\"", "server has encountered an uncategorized error condition \"\"\" payload = { \"Arg1\": self.href", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'],", "a PDF report for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "# the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or", "'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP", "test. - async_operation (bool=False): True to execute the operation asynchronously. 
Any subsequent rest", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def", "in the fast path. - FastPathNumFrames (number): Specifies the number of learn frames", "self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property", "- number: Specifies the length of time in ms that IxNetwork pauses before", "FastPathNumFrames (number): Specifies the number of learn frames that IxNetwork sends through fast", "asynchronously. Any subsequent rest api calls made through the Connection class will block", "furnished to do so, subject to the following conditions: # # The above", "MAC address only. - LearnSendRouterSolicitation (bool): Sends router solicitation messages. - LearnWaitTime (number):", "sending all the Raises ------ - ServerError: The server has encountered an uncategorized", "sends learn frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value):", "the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell", "Any subsequent rest api calls made through the Connection class will block until", "@LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self,", "Specifies the rate at which IxNetwork sends learn frames through fast path. \"\"\"", "def GenerateReport(self, *args, **kwargs): # type: (*Any, **Any) -> Union[str, None] \"\"\"Executes the", "IxNetwork sends learning frames during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self,", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None]", "be included in # all copies or substantial portions of the Software. #", "that IxNetwork sends through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value):", "IxNetwork sends learn frames to the DUT. - LearnSendMacOnly (bool): Sends learning frames", "is synchronous and returns the result of the test. Raises ------ - NotFoundError:", "- ServerError: The server has encountered an uncategorized error condition \"\"\" payload =", "SOFTWARE. from uhd_restpy.base import Base from uhd_restpy.files import Files from typing import List,", "(bool, int, int, int, int, str, int, int, bool, bool, int, int) ->", "payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type:", "through fast path. - FastPathRate (number): Specifies the rate at which IxNetwork sends", "- NotFoundError: The requested resource does not exist on the server - ServerError:", "# type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the run operation on the", "the test. 
Raises ------ - NotFoundError: The requested resource does not exist on", "() _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames',", "oncePerFramesize | oncePerTest | onTrial): Allows to choose how frequently IxNetwork sends learning", "the test. - LearnNumFrames (number): Specifies the number of learning frames that IxNetwork", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs):", "the same name while python does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True", "return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value)", "server - ServerError: The server has encountered an uncategorized error condition \"\"\" payload", "specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation", "FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool,", "class will block until the operation is complete. - Returns bool: Raises ------", "result of the test. Raises ------ - NotFoundError: The requested resource does not", "def LearnSendRouterSolicitation(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self):", "\"\"\"Updates learnFrames resource on the server. Args ---- - FastPathEnable (bool): If true,", "# type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: ()", "has encountered an uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args,", "path. - FastPathNumFrames (number): Specifies the number of learn frames that IxNetwork sends", "Specifies the number of learn frames that IxNetwork sends through fast path. \"\"\"", "\"\"\" Returns ------- - bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter", "IxNetwork sends learn frames to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self,", "None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: () -> bool \"\"\" Returns", "- number: Specifies the size of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter", "None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\" Returns", "that IxNetwork sends for each address. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value):", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None,", "'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self):", "start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "frames that IxNetwork sends through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self,", "value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None,", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args,", "the server. Generate a PDF report for the last succesfull test run. generateReport(async_operation=bool)string", "method is synchronous and returns the result of the test. Raises ------ -", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload,", "LearnWaitTime (number): Specifies the length of time in ms that IxNetwork pauses before", "type: () -> int \"\"\" Returns ------- - number: Specifies the length of", "item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any)", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args,", "the Raises ------ - ServerError: The server has encountered an uncategorized error condition", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type: () ->", "address only. - LearnSendRouterSolicitation (bool): Sends router solicitation messages. - LearnWaitTime (number): Specifies", "MAC address only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool)", "to MAC address only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type:", "'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): #", "permit persons to whom the # Software is furnished to do so, subject", "------- - number: Specifies the number of learning frames that IxNetwork sends for", "-> bool \"\"\" Returns ------- - bool: Sends learning frames to MAC address", "copyright notice and this permission notice shall be included in # all copies", "self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property", "path transmit. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) ->", "server. Generate a PDF report for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------", "Signatures with the same name while python does not. run(async_operation=bool)list ----------------------------- - async_operation", "the Software without restriction, including without limitation # the rights to use, copy,", "(bool): Sends router solicitation messages. - LearnWaitTime (number): Specifies the length of time", "-> None \"\"\"Executes the apply operation on the server. Applies the specified Quick", "int, bool, bool, int, int) -> LearnFrames \"\"\"Updates learnFrames resource on the server.", "class will block until the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters", "server. Applies the specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to", "DUT. - LearnSendMacOnly (bool): Sends learning frames to MAC address only. - LearnSendRouterSolicitation", "each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int) ->", "int \"\"\" Returns ------- - number: Specifies the length of time in ms", "the generateReport operation on the server. Generate a PDF report for the last", "payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes", "block until the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The", "the apply operation on the server. Applies the specified Quick Test. apply(async_operation=bool) ---------------------------", "learn frames to the DUT. - LearnSendMacOnly (bool): Sends learning frames to MAC", "*args, **kwargs): # type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult operation", "Connection class will block until the operation is complete. - Returns str: This", "@property def LearnWaitTime(self): # type: () -> int \"\"\" Returns ------- - number:", "of time in ms that IxNetwork pauses before sending all the learning frames", "to execute the operation asynchronously. Any subsequent rest api calls made through the", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: () -> bool \"\"\"", "*args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the applyAsync operation on", "oncePerTest | onTrial)): Allows to choose how frequently IxNetwork sends learning frames during", "uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type:", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'],", "self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str],", "test. 
run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments of the test.", "Returns str: This method is asynchronous and has no return value. Raises ------", "'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation':", "pauses before sending all the learning frames from all the ports. - LearnWaitTimeBeforeTransmit", "a copy # of this software and associated documentation files (the \"Software\"), #", "on the server. Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False):", "() -> int \"\"\" Returns ------- - number: Specifies the length of time", "Connection class will block until the operation is complete. Raises ------ - NotFoundError:", "operation on the server. Stops the currently running Quick Test. stop(async_operation=bool) -------------------------- -", "LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\" Returns ------- - number: Specifies the", "solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool) ->", "*args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the run operation", "'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = {", "-> str \"\"\" Returns ------- - str(never | onBinaryIteration | oncePerFramesize | oncePerTest", "of the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments of", "the number of learn frames that IxNetwork sends through fast path. \"\"\" return", "The LearnFrames class encapsulates a required learnFrames resource which will be retrieved from", "the learning frames from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self,", "------- - number: Specifies the size of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize'])", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self,", "\"\"\" Returns ------- - number: Specifies the rate at which IxNetwork sends learn", "value) @property def FastPathLearnFrameSize(self): # type: () -> int \"\"\" Returns ------- -", "model allows for multiple method Signatures with the same name while python does", "for its execution to finish. The IxNetwork model allows for multiple method Signatures", "} _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self,", "of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): # type:", "of the specified Quick Test to be completed. 
waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False):", "the server. Applies the specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "value) @property def LearnFrameSize(self): # type: () -> int \"\"\" Returns ------- -", "Specifies the number of learning frames that IxNetwork sends for each address. \"\"\"", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None)", "# all copies or substantial portions of the Software. # # THE SOFTWARE", "Software, and to permit persons to whom the # Software is furnished to", "associated documentation files (the \"Software\"), # to deal in the Software without restriction,", "the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) ->", "Args ---- - FastPathEnable (bool): If true, enables fast path transmit. - FastPathLearnFrameSize", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('waitForTest', payload=payload, response_object=None)", "(i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return", "FastPathLearnFrameSize (number): Specifies the size of the learning frames in the fast path.", "synchronous and returns the result of the test. Raises ------ - NotFoundError: The", "all copies or substantial portions of the Software. # # THE SOFTWARE IS", "self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any, **Any) -> None", "- LearnRate (number): Specifies the rate at which IxNetwork sends learn frames to", "ServerError: The server has encountered an uncategorized error condition \"\"\" payload = {", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def", "learn frames that IxNetwork sends through fast path. - FastPathRate (number): Specifies the", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "@property def LearnSendRouterSolicitation(self): # type: () -> bool \"\"\" Returns ------- - bool:", "Specifies the size of the learning frames in the fast path. - FastPathNumFrames", "not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any", "None \"\"\"Executes the applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False):", "def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the", "------------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest", "\"\"\" Returns ------- - number: Specifies the number of learning frames that IxNetwork", "following conditions: # # The above copyright notice and this permission notice shall", "with the same name while python does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False):", "the size of the learning frames in the fast path. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'])", "_SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate':", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: () -> bool \"\"\"", "multiple method Signatures with the same name while python does not. start(async_operation=bool) ---------------------------", "return self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs): # type: (*Any, **Any) ->", "The above copyright notice and this permission notice shall be included in #", "learning frames from all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length of", "(*Any, **Any) -> Union[str, None] \"\"\"Executes the generateReport operation on the server. Generate", "% (i + 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1]", "Signatures with the same name while python does not. start(async_operation=bool) --------------------------- - async_operation", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs):", "subsequent rest api calls made through the Connection class will block until the", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None)", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "size of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): #", "number: Specifies the size of the learning frames in the fast path. \"\"\"", "None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type: () -> int \"\"\" Returns", "value) @property def LearnNumFrames(self): # type: () -> int \"\"\" Returns ------- -", "__init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): # type: () ->", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "int, int, bool, bool, int, int) -> LearnFrames \"\"\"Updates learnFrames resource on the", "DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type: (int) -> None", "return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any, **Any) ->", "type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: () ->", "bool: Raises ------ - NotFoundError: The requested resource does not exist on the", "to choose how frequently IxNetwork sends learning frames during the test. \"\"\" return", "Returns ------- - number: Specifies the size of the learning frames. \"\"\" return", "ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length of time in ms that IxNetwork", "bool, int, int) -> LearnFrames \"\"\"Updates learnFrames resource on the server. Args ----", "notice shall be included in # all copies or substantial portions of the", "-> Union[str, None] \"\"\"Executes the generateReport operation on the server. 
Generate a PDF", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "| onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)): Allows to choose how frequently", "is complete. - Returns str: This method is asynchronous and has no return", "Quick Test. stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "Union[List[str], None] \"\"\"Executes the waitForTest operation on the server. Waits for the execution", "Base from uhd_restpy.files import Files from typing import List, Any, Union class LearnFrames(Base):", "@FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def", "value) @property def FastPathRate(self): # type: () -> int \"\"\" Returns ------- -", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: () -> int", "how frequently IxNetwork sends learning frames during the test. - LearnNumFrames (number): Specifies", "'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', }", "type: () -> bool \"\"\" Returns ------- - bool: Sends router solicitation messages.", "length of time in ms that IxNetwork pauses before sending all the learning", "self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any) -> None", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'],", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: () ->", "deal in the Software without restriction, including without limitation # the rights to", "If true, enables fast path transmit. - FastPathLearnFrameSize (number): Specifies the size of", "learnFrames resource on the server. Args ---- - FastPathEnable (bool): If true, enables", "**Any) -> None \"\"\"Executes the applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- -", "the execution of the specified Quick Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- -", "-> None \"\"\"Executes the start operation on the server. Starts the specified Quick", "- Returns str: This method is asynchronous and has no return value. Raises", "the operation is complete. - Returns list(str): This method is synchronous and returns", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply',", "------- - bool: Sends learning frames to MAC address only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'])", "the stop operation on the server. Stops the currently running Quick Test. stop(async_operation=bool)", "@LearnRate.setter def LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def", "block until the operation is complete. 
- Returns str: This method is asynchronous", "**Any) -> None \"\"\"Executes the stop operation on the server. Stops the currently", "LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int,", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: () -> bool", "**Any) -> None \"\"\"Executes the start operation on the server. Starts the specified", "frames. - LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)): Allows", "ms that IxNetwork pauses before sending all the Raises ------ - ServerError: The", "------- - number: Specifies the length of time in ms that IxNetwork pauses", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None,", "LearnWaitTime(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): #", "at which IxNetwork sends learn frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter", "self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] = args[i]", "path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int) -> None", "Raises ------ - ServerError: The server has encountered an uncategorized error condition \"\"\"", "item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args,", "value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type:", "oncePerTest | onTrial): Allows to choose how frequently IxNetwork sends learning frames during", "value) @property def LearnFrequency(self): # type: () -> str \"\"\" Returns ------- -", "number: Specifies the rate at which IxNetwork sends learn frames to the DUT.", "will block until the operation is complete. - Returns bool: Raises ------ -", "type: (*Any, **Any) -> None \"\"\"Executes the applyAsync operation on the server. applyAsync(async_operation=bool)", "(bool): If true, enables fast path transmit. - FastPathLearnFrameSize (number): Specifies the size", "frames in the fast path. - FastPathNumFrames (number): Specifies the number of learn", "returns the result of the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The", "(number): Specifies the rate at which IxNetwork sends learn frames to the DUT.", "operation is complete. - Returns list(str): This method is synchronous and returns the", "def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None,", "# THE SOFTWARE. 
from uhd_restpy.base import Base from uhd_restpy.files import Files from typing", "to the following conditions: # # The above copyright notice and this permission", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload,", "the Connection class will block until the operation is complete. start(InputParameters=string, async_operation=bool) ---------------------------------------------------", "kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): #", "complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments of the test.", "- number: Specifies the number of learn frames that IxNetwork sends through fast", "-> int \"\"\" Returns ------- - number: Specifies the number of learning frames", "number: Specifies the length of time in ms that IxNetwork pauses before sending", "'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency':", "\"\"\"Executes the applyITWizardConfiguration operation on the server. Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool)", "response_object=None) def Run(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes", "type: () -> int \"\"\" Returns ------- - number: Specifies the number of", "copies of the Software, and to permit persons to whom the # Software", "class will block until the operation is complete. - Returns list(str): This method", "def LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self):", "int \"\"\" Returns ------- - number: Specifies the size of the learning frames.", "synchronous and returns the result of the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters", "payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) -> Union[bool, None]", "PDF report for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False):", "messages. - LearnWaitTime (number): Specifies the length of time in ms that IxNetwork", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self,", "'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames':", "Software is furnished to do so, subject to the following conditions: # #", "\"\"\"Executes the apply operation on the server. Applies the specified Quick Test. apply(async_operation=bool)", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs):", "learning frames to MAC address only. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value):", "Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to execute", "sending all the learning frames from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter", "only. - LearnSendRouterSolicitation (bool): Sends router solicitation messages. - LearnWaitTime (number): Specifies the", "None \"\"\"Executes the stop operation on the server. Stops the currently running Quick", "Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "the Connection class will block until the operation is complete. - Returns bool:", "self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type: () -> int \"\"\" Returns -------", "-> None \"\"\"Executes the stop operation on the server. Stops the currently running", "run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any", "------ - NotFoundError: The requested resource does not exist on the server -", "frames in the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): #", "= item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any,", "self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def", "run operation on the server. Starts the specified Quick Test and waits for", "payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs): # type:", "__slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize',", "kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): #", "Connection class will block until the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- -", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self,", "learning frames in the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value):", "from the server every time the property is accessed. \"\"\" __slots__ = ()", "on the server. 
applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to execute the operation", "str \"\"\" Returns ------- - str(never | onBinaryIteration | oncePerFramesize | oncePerTest |", "= { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency':", "the size of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value):", "int, str, int, int, bool, bool, int, int) -> LearnFrames \"\"\"Updates learnFrames resource", "IxNetwork sends for each address. - LearnRate (number): Specifies the rate at which", "**Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------", "the length of time in ms that IxNetwork pauses before sending all the", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "that IxNetwork sends for each address. - LearnRate (number): Specifies the rate at", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "which IxNetwork sends learn frames to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def", "response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the", "number: Specifies the number of learn frames that IxNetwork sends through fast path.", "fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type: (bool)", "the following conditions: # # The above copyright notice and this permission notice", "run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments of the test. -", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type:", "'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation',", "- str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial): Allows to choose", "the run operation on the server. Starts the specified Quick Test and waits", "running Quick Test. stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute the operation", "----------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest", "sell copies of the Software, and to permit persons to whom the #", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "(number): Specifies the rate at which IxNetwork sends learn frames through fast path.", "to do so, subject to the following conditions: # # The above copyright", "test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str) -> None", "the waitForTest operation on the server. 
Waits for the execution of the specified", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None) def", "'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly',", "return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value)", "an uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): #", "limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, #", "- LearnSendMacOnly (bool): Sends learning frames to MAC address only. - LearnSendRouterSolicitation (bool):", "on the server. Applies the specified Quick Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False):", "'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def", "method is asynchronous and has no return value. Raises ------ - NotFoundError: The", "class will block until the operation is complete. - Returns str: This method", "} def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): # type:", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None)", "LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int, int, str, int, int, bool,", "property is accessed. \"\"\" __slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = {", "block until the operation is complete. - Returns bool: Raises ------ - NotFoundError:", "while python does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute the", "the result of the test. Raises ------ - NotFoundError: The requested resource does", "the server. Stops the currently running Quick Test. stop(async_operation=bool) -------------------------- - async_operation (bool=False):", "kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs): #", "value) @property def LearnWaitTime(self): # type: () -> int \"\"\" Returns ------- -", "| onTrial)): Allows to choose how frequently IxNetwork sends learning frames during the", "Any, Union class LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends during the test.", "Returns ------- - bool: If true, enables fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable'])", "# type: () -> int \"\"\" Returns ------- - number: Specifies the rate", "to finish. 
The IxNetwork model allows for multiple method Signatures with the same", "| onBinaryIteration | oncePerFramesize | oncePerTest | onTrial): Allows to choose how frequently", "permission notice shall be included in # all copies or substantial portions of", "LearnFrameSize(self): # type: () -> int \"\"\" Returns ------- - number: Specifies the", "response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes", "- 2020 by IXIA Keysight # # Permission is hereby granted, free of", "type: (bool, int, int, int, int, str, int, int, bool, bool, int, int)", "------------------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest", "type: () -> int \"\"\" Returns ------- - number: Specifies the rate at", "self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type: () -> str \"\"\" Returns -------", "str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial): Allows to choose how", "- LearnWaitTime (number): Specifies the length of time in ms that IxNetwork pauses", "'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly':", "Allows to choose how frequently IxNetwork sends learning frames during the test. \"\"\"", "# type: (*Any, **Any) -> None \"\"\"Executes the applyAsync operation on the server.", "IXIA Keysight # # Permission is hereby granted, free of charge, to any", "sublicense, # and/or sell copies of the Software, and to permit persons to", "list_op) @property def FastPathEnable(self): # type: () -> bool \"\"\" Returns ------- -", "sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int)", "fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int) ->", "Specifies the number of learn frames that IxNetwork sends through fast path. -", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'],", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\"", "frames to MAC address only. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): #", "def LearnRate(self): # type: () -> int \"\"\" Returns ------- - number: Specifies", "learn frames to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): #", "to whom the # Software is furnished to do so, subject to the", "learning frames in the fast path. - FastPathNumFrames (number): Specifies the number of", "OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import", "IxNetwork pauses before sending all the learning frames from all the ports. 
-", "@FastPathRate.setter def FastPathRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def", "will block until the operation is complete. start(InputParameters=string, async_operation=bool) --------------------------------------------------- - InputParameters (str):", "server. Args ---- - FastPathEnable (bool): If true, enables fast path transmit. -", "frames through fast path. - LearnFrameSize (number): Specifies the size of the learning", "shall be included in # all copies or substantial portions of the Software.", "= item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): # type: (*Any,", "oncePerFramesize | oncePerTest | onTrial)): Allows to choose how frequently IxNetwork sends learning", "from typing import List, Any, Union class LearnFrames(Base): \"\"\"The learning frames that IxNetwork", "def LearnSendMacOnly(self): # type: () -> bool \"\"\" Returns ------- - bool: Sends", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "complete. - Returns str: This method is asynchronous and has no return value.", "- bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value):", "transmit. - FastPathLearnFrameSize (number): Specifies the size of the learning frames in the", "operation on the server. Waits for the execution of the specified Quick Test", "return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value)", "specified Quick Test and waits for its execution to finish. The IxNetwork model", "is asynchronous and has no return value. Raises ------ - NotFoundError: The requested", "each address. - LearnRate (number): Specifies the rate at which IxNetwork sends learn", "which IxNetwork sends learn frames to the DUT. - LearnSendMacOnly (bool): Sends learning", "# type: (*Any, **Any) -> None \"\"\"Executes the applyITWizardConfiguration operation on the server.", "without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense,", "- FastPathRate (number): Specifies the rate at which IxNetwork sends learn frames through", "the learning frames from all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length", "-> int \"\"\" Returns ------- - number: Specifies the number of learn frames", "path. - LearnFrameSize (number): Specifies the size of the learning frames. - LearnFrequency", "\"Arg1\": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)] =", "# Copyright 1997 - 2020 by IXIA Keysight # # Permission is hereby", "apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation asynchronously. 
Any subsequent", "from uhd_restpy.files import Files from typing import List, Any, Union class LearnFrames(Base): \"\"\"The", "'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit':", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "- number: Specifies the size of the learning frames in the fast path.", "method Signatures with the same name while python does not. start(async_operation=bool) --------------------------- -", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('waitForTest',", "in the Software without restriction, including without limitation # the rights to use,", "path. - FastPathRate (number): Specifies the rate at which IxNetwork sends learn frames", "number: Specifies the number of learning frames that IxNetwork sends for each address.", "the learning frames. - LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest |", "multiple method Signatures with the same name while python does not. run(async_operation=bool)list -----------------------------", "copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software,", "Specifies the size of the learning frames in the fast path. \"\"\" return", "completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type: () -> str \"\"\"", "'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never',", "None] \"\"\"Executes the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False):", "*args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the waitForTest operation", "Connection class will block until the operation is complete. - Returns list(str): This", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type: ()", "generateReport operation on the server. Generate a PDF report for the last succesfull", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN #", "@property def FastPathRate(self): # type: () -> int \"\"\" Returns ------- - number:", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "from all the ports. - LearnWaitTimeBeforeTransmit (number): Specifies the length of time in", "the number of learning frames that IxNetwork sends for each address. - LearnRate", "number: Specifies the rate at which IxNetwork sends learn frames through fast path.", "int \"\"\" Returns ------- - number: Specifies the size of the learning frames", "int, int) -> LearnFrames \"\"\"Updates learnFrames resource on the server. Args ---- -", "def FastPathRate(self): # type: () -> int \"\"\" Returns ------- - number: Specifies", "- bool: Sends learning frames to MAC address only. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('start',", "kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): #", "Quick Test and waits for its execution to finish. The IxNetwork model allows", "-> int \"\"\" Returns ------- - number: Specifies the length of time in", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: () -> int", "kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): #", "None] \"\"\"Executes the run operation on the server. Starts the specified Quick Test", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "def FastPathLearnFrameSize(self): # type: () -> int \"\"\" Returns ------- - number: Specifies", "def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes the", "**kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the waitForTest operation on", "router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool)", "This method is asynchronous and has no return value. Raises ------ - NotFoundError:", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type: () -> int \"\"\"", "IxNetwork pauses before sending all the Raises ------ - ServerError: The server has", "**kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the start operation on the", "None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type: () -> str \"\"\" Returns", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type:", "def LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self):", "def LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\" Returns ------- - number: Specifies", "and returns the result of the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str):", "@property def FastPathEnable(self): # type: () -> bool \"\"\" Returns ------- - bool:", "-> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None,", "self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any, **Any) -> Union[str,", "= { \"Arg1\": self.href } for i in range(len(args)): payload['Arg%s' % (i +", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO", "= item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs): # type: (*Any,", "async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments of the test. - async_operation", "currently running Quick Test. stop(async_operation=bool) -------------------------- - async_operation (bool=False): True to execute the", "payload=payload, response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any, **Any) -> Union[str, None]", "sends learning frames during the test. - LearnNumFrames (number): Specifies the number of", "frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self, value): # type:", "server. Starts the specified Quick Test and waits for its execution to finish.", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any)", "error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type: (*Any,", "the applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to", "copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED", "**kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the stop operation on the", "return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value)", "return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) ->", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self,", "- ServerError: The server has encountered an uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP,", "for each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int)", "@LearnFrequency.setter def LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def", "Keysight # # Permission is hereby granted, free of charge, to any person", "FastPathNumFrames(self): # type: () -> int \"\"\" Returns ------- - number: Specifies the", "Permission is hereby granted, free of charge, to any person obtaining a copy", "# type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: ()", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "learning frames during the test. - LearnNumFrames (number): Specifies the number of learning", "encountered an uncategorized error condition \"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs):", "LearnSendMacOnly(self): # type: () -> bool \"\"\" Returns ------- - bool: Sends learning", "enables fast path transmit. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type:", "@FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def", "--------------------------------------------------- - InputParameters (str): The input arguments of the test. - async_operation (bool=False):", "Specifies the number of learning frames that IxNetwork sends for each address. -", "# The above copyright notice and this permission notice shall be included in", "The IxNetwork model allows for multiple method Signatures with the same name while", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs):", "the Connection class will block until the operation is complete. - Returns list(str):", "and to permit persons to whom the # Software is furnished to do", "ms that IxNetwork pauses before sending all the learning frames from all the", "Files from typing import List, Any, Union class LearnFrames(Base): \"\"\"The learning frames that", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('waitForTest', payload=payload,", "class LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends during the test. The LearnFrames", "Specifies the length of time in ms that IxNetwork pauses before sending all", "complete. - Returns list(str): This method is synchronous and returns the result of", "LearnFrames(Base): \"\"\"The learning frames that IxNetwork sends during the test. The LearnFrames class", "IxNetwork pauses before sending all the learning frames from all the ports. \"\"\"", "substantial portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize',", "for multiple method Signatures with the same name while python does not. start(async_operation=bool)", "Sends router solicitation messages. - LearnWaitTime (number): Specifies the length of time in", "payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): # type:", "learning frames that IxNetwork sends for each address. - LearnRate (number): Specifies the", "the operation is complete. - Returns str: This method is asynchronous and has", "# type: () -> int \"\"\" Returns ------- - number: Specifies the number", "payload = { \"Arg1\": self.href } for i in range(len(args)): payload['Arg%s' % (i", "self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str],", "'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames',", "(*Any, **Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult operation on the server. 
applyAsyncResult(async_operation=bool)bool", "-> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type: () -> int \"\"\"", "= item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type: (*Any,", "time the property is accessed. \"\"\" __slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP", "bool: If true, enables fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self,", "ApplyAsync(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the applyAsync operation", "\"\"\"Executes the stop operation on the server. Stops the currently running Quick Test.", "----------------------------------------------------- - InputParameters (str): The input arguments of the test. - async_operation (bool=False):", "the learning frames in the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def FastPathLearnFrameSize(self,", "the test. - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: () ->", "type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult operation on the server.", "item[1] return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type: (*Any, **Any)", "value) @property def LearnRate(self): # type: () -> int \"\"\" Returns ------- -", "Waits for the execution of the specified Quick Test to be completed. waitForTest(async_operation=bool)list", "generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "FastPathRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): #", "WaitForTest(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the waitForTest", "True to execute the operation asynchronously. Any subsequent rest api calls made through", "for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self,", "response_object=None) def GenerateReport(self, *args, **kwargs): # type: (*Any, **Any) -> Union[str, None] \"\"\"Executes", "() -> bool \"\"\" Returns ------- - bool: Sends router solicitation messages. \"\"\"", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the waitForTest operation on the server.", "() -> int \"\"\" Returns ------- - number: Specifies the number of learning", "accessed. \"\"\" __slots__ = () _SDM_NAME = 'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable',", "to permit persons to whom the # Software is furnished to do so,", "with the same name while python does not. start(async_operation=bool) --------------------------- - async_operation (bool=False):", "server. Applies the specified Quick Test. 
applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to", "LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): #", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type:", "# type: (bool, int, int, int, int, str, int, int, bool, bool, int,", "size of the learning frames in the fast path. - FastPathNumFrames (number): Specifies", "'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property", "applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes", "server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously. Any", "() -> int \"\"\" Returns ------- - number: Specifies the rate at which", "the rate at which IxNetwork sends learn frames through fast path. \"\"\" return", "stop operation on the server. Stops the currently running Quick Test. stop(async_operation=bool) --------------------------", "2020 by IXIA Keysight # # Permission is hereby granted, free of charge,", "(number): Specifies the size of the learning frames. - LearnFrequency (str(never | onBinaryIteration", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "'learnFrames' _SDM_ATT_MAP = { 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize':", "on the server. Waits for the execution of the specified Quick Test to", "that IxNetwork pauses before sending all the learning frames from all the ports.", "be retrieved from the server every time the property is accessed. \"\"\" __slots__", "rate at which IxNetwork sends learn frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate'])", "<filename>uhd_restpy/testplatform/sessions/ixnetwork/quicktest/learnframes_58e01d83db5d99bcabff902f5cf6ec51.py # MIT LICENSE # # Copyright 1997 - 2020 by IXIA Keysight", "LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): #", "no return value. Raises ------ - NotFoundError: The requested resource does not exist", "Test. apply(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsyncResult',", "until the operation is complete. 
Raises ------ - NotFoundError: The requested resource does", "(str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: () -> int", "'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime':", "messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool) -> None", "type: (*Any, **Any) -> None \"\"\"Executes the stop operation on the server. Stops", "onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)): Allows to choose how frequently IxNetwork", "files (the \"Software\"), # to deal in the Software without restriction, including without", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "frames during the test. - LearnNumFrames (number): Specifies the number of learning frames", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None,", "def FastPathNumFrames(self): # type: () -> int \"\"\" Returns ------- - number: Specifies", "person obtaining a copy # of this software and associated documentation files (the", "-> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: () -> int \"\"\"", "all the Raises ------ - ServerError: The server has encountered an uncategorized error", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload, response_object=None) def", "to MAC address only. - LearnSendRouterSolicitation (bool): Sends router solicitation messages. - LearnWaitTime", "self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs): # type: (*Any, **Any) -> None", "of the test. - async_operation (bool=False): True to execute the operation asynchronously. Any", "\"\"\"The learning frames that IxNetwork sends during the test. The LearnFrames class encapsulates", "above copyright notice and this permission notice shall be included in # all", "which IxNetwork sends learn frames through fast path. - LearnFrameSize (number): Specifies the", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None)", "\"\"\" payload = { \"Arg1\": self.href } for i in range(len(args)): payload['Arg%s' %", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type:", "on the server. Starts the specified Quick Test and waits for its execution", "execution to finish. 
The IxNetwork model allows for multiple method Signatures with the", "item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs): # type: (*Any, **Any)", "-> bool \"\"\" Returns ------- - bool: If true, enables fast path transmit.", "conditions: # # The above copyright notice and this permission notice shall be", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "# type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type: ()", "# type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type: ()", "int \"\"\" Returns ------- - number: Specifies the number of learning frames that", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "number: Specifies the size of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def", "LearnNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): #", "or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS", "async_operation=bool) --------------------------------------------------- - InputParameters (str): The input arguments of the test. - async_operation", "rate at which IxNetwork sends learn frames through fast path. - LearnFrameSize (number):", "will be retrieved from the server every time the property is accessed. \"\"\"", "learning frames. - LearnFrequency (str(never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial)):", "item[1] return self._execute('generateReport', payload=payload, response_object=None) def Run(self, *args, **kwargs): # type: (*Any, **Any)", "notice and this permission notice shall be included in # all copies or", "@property def FastPathNumFrames(self): # type: () -> int \"\"\" Returns ------- - number:", "of charge, to any person obtaining a copy # of this software and", "str: This method is asynchronous and has no return value. Raises ------ -", "sends during the test. The LearnFrames class encapsulates a required learnFrames resource which", "LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int, int, int, int, str,", "LearnSendRouterSolicitation(self): # type: () -> bool \"\"\" Returns ------- - bool: Sends router", "LearnNumFrames (number): Specifies the number of learning frames that IxNetwork sends for each", "all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type: (int) ->", "operation on the server. Starts the specified Quick Test and waits for its", "not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute the operation asynchronously. 
Any", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type:", "FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None, LearnSendRouterSolicitation=None, LearnWaitTime=None, LearnWaitTimeBeforeTransmit=None): # type: (bool, int,", "Returns ------- - number: Specifies the rate at which IxNetwork sends learn frames", "FastPathLearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): #", "included in # all copies or substantial portions of the Software. # #", "value. Raises ------ - NotFoundError: The requested resource does not exist on the", "apply operation on the server. Applies the specified Quick Test. apply(async_operation=bool) --------------------------- -", "the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): #", "this software and associated documentation files (the \"Software\"), # to deal in the", "ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) -> None", "@property def LearnRate(self): # type: () -> int \"\"\" Returns ------- - number:", "retrieved from the server every time the property is accessed. \"\"\" __slots__ =", "Software without restriction, including without limitation # the rights to use, copy, modify,", "exist on the server - ServerError: The server has encountered an uncategorized error", "------- - number: Specifies the number of learn frames that IxNetwork sends through", "- FastPathEnable (bool): If true, enables fast path transmit. - FastPathLearnFrameSize (number): Specifies", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "GenerateReport(self, *args, **kwargs): # type: (*Any, **Any) -> Union[str, None] \"\"\"Executes the generateReport", "at which IxNetwork sends learn frames to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter", "= args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None)", "- number: Specifies the rate at which IxNetwork sends learn frames to the", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'], value) @property def FastPathRate(self): # type: () -> int", "def LearnWaitTime(self): # type: () -> int \"\"\" Returns ------- - number: Specifies", "operation is complete. - Returns str: This method is asynchronous and has no", "None self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: () -> int \"\"\" Returns", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload,", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "The requested resource does not exist on the server - ServerError: The server", "on the server. Generate a PDF report for the last succesfull test run.", "at which IxNetwork sends learn frames through fast path. 
- LearnFrameSize (number): Specifies", "| oncePerFramesize | oncePerTest | onTrial): Allows to choose how frequently IxNetwork sends", "sends for each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type:", "IN # THE SOFTWARE. from uhd_restpy.base import Base from uhd_restpy.files import Files from", "import Base from uhd_restpy.files import Files from typing import List, Any, Union class", "# # Copyright 1997 - 2020 by IXIA Keysight # # Permission is", "of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "def FastPathRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self):", "the start operation on the server. Starts the specified Quick Test. The IxNetwork", "return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value)", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration', payload=payload, response_object=None) def", "def LearnNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self):", "(*Any, **Any) -> Union[List[str], None] \"\"\"Executes the waitForTest operation on the server. Waits", "THE SOFTWARE. from uhd_restpy.base import Base from uhd_restpy.files import Files from typing import", "(number): Specifies the size of the learning frames in the fast path. -", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def LearnFrequency(self): # type: () -> str", "specified Quick Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to", "bool \"\"\" Returns ------- - bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'])", "persons to whom the # Software is furnished to do so, subject to", "int, int, int, str, int, int, bool, bool, int, int) -> LearnFrames \"\"\"Updates", "number of learning frames that IxNetwork sends for each address. - LearnRate (number):", "self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None, LearnFrequency=None, LearnNumFrames=None, LearnRate=None, LearnSendMacOnly=None,", "choose how frequently IxNetwork sends learning frames during the test. - LearnNumFrames (number):", "-> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self): # type: () -> int \"\"\"", "Test. The IxNetwork model allows for multiple method Signatures with the same name", "pauses before sending all the Raises ------ - ServerError: The server has encountered", "operation is complete. - Returns bool: Raises ------ - NotFoundError: The requested resource", "value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def LearnSendRouterSolicitation(self): # type:", "of learning frames that IxNetwork sends for each address. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter", "'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], }", "the server every time the property is accessed. \"\"\" __slots__ = () _SDM_NAME", "rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies", "import Files from typing import List, Any, Union class LearnFrames(Base): \"\"\"The learning frames", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('generateReport', payload=payload,", "frames that IxNetwork sends for each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self,", "to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of", "def FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def FastPathLearnFrameSize(self):", "the learning frames in the fast path. - FastPathNumFrames (number): Specifies the number", "applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to execute", "frequently IxNetwork sends learning frames during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def", "LearnFrequency(self): # type: () -> str \"\"\" Returns ------- - str(never | onBinaryIteration", "in # all copies or substantial portions of the Software. # # THE", "\"\"\" Returns ------- - number: Specifies the length of time in ms that", "the operation is complete. Raises ------ - NotFoundError: The requested resource does not", "through the Connection class will block until the operation is complete. - Returns", "'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'],", "the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to execute the operation asynchronously.", "{ \"Arg1\": self.href } for i in range(len(args)): payload['Arg%s' % (i + 2)]", "block until the operation is complete. - Returns list(str): This method is synchronous", "OR OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import Base from uhd_restpy.files", "return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value)", "@property def LearnWaitTimeBeforeTransmit(self): # type: () -> int \"\"\" Returns ------- - number:", "execute the operation asynchronously. Any subsequent rest api calls made through the Connection", "frames during the test. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type:", "free of charge, to any person obtaining a copy # of this software", "type: () -> int \"\"\" Returns ------- - number: Specifies the size of", "'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload,", "FastPathEnable(self): # type: () -> bool \"\"\" Returns ------- - bool: If true,", "self._set_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize'], value) @property def FastPathNumFrames(self): # type: () -> int \"\"\" Returns -------", "list_op=False): super(LearnFrames, self).__init__(parent, list_op) @property def FastPathEnable(self): # type: () -> bool \"\"\"", "the specified Quick Test to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True", "Copyright 1997 - 2020 by IXIA Keysight # # Permission is hereby granted,", "# type: (*Any, **Any) -> None \"\"\"Executes the stop operation on the server.", "# type: (*Any, **Any) -> None \"\"\"Executes the apply operation on the server.", "of the learning frames in the fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathLearnFrameSize']) @FastPathLearnFrameSize.setter def", "on the server - ServerError: The server has encountered an uncategorized error condition", "USE OR OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import Base from", "number of learning frames that IxNetwork sends for each address. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames'])", "in ms that IxNetwork pauses before sending all the learning frames from all", "@LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property def", "LearnWaitTimeBeforeTransmit(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None,", "on the server. Args ---- - FastPathEnable (bool): If true, enables fast path", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def FastPathNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathNumFrames'],", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "frames from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def LearnWaitTime(self, value): #", "# type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the waitForTest operation on the", "learn frames that IxNetwork sends through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter def", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "(*Any, **Any) -> None \"\"\"Executes the applyAsync operation on the server. applyAsync(async_operation=bool) --------------------------------", "-> LearnFrames \"\"\"Updates learnFrames resource on the server. 
Args ---- - FastPathEnable (bool):", "2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('run', payload=payload,", "for the last succesfull test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to", "Apply(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the apply operation", "learnFrames resource which will be retrieved from the server every time the property", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit'], value) def update(self, FastPathEnable=None, FastPathLearnFrameSize=None, FastPathNumFrames=None, FastPathRate=None, LearnFrameSize=None,", "LICENSE # # Copyright 1997 - 2020 by IXIA Keysight # # Permission", "- bool: If true, enables fast path transmit. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def", "start operation on the server. Starts the specified Quick Test. The IxNetwork model", "onTrial)): Allows to choose how frequently IxNetwork sends learning frames during the test.", "applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to execute", "def FastPathEnable(self): # type: () -> bool \"\"\" Returns ------- - bool: If", "modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "Raises ------ - NotFoundError: The requested resource does not exist on the server", "while python does not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute the", "through the Connection class will block until the operation is complete. start(InputParameters=string, async_operation=bool)", "\"\"\" return self._update(self._map_locals(self._SDM_ATT_MAP, locals())) def Apply(self, *args, **kwargs): # type: (*Any, **Any) ->", "api calls made through the Connection class will block until the operation is", "**kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the run operation on", "-> None \"\"\"Executes the applyITWizardConfiguration operation on the server. Applies the specified Quick", "that IxNetwork pauses before sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self,", "ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) -> Union[bool, None] \"\"\"Executes the applyAsyncResult", "FastPathRate (number): Specifies the rate at which IxNetwork sends learn frames through fast", "\"\"\"Executes the waitForTest operation on the server. Waits for the execution of the", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type: () ->", "**kwargs): # type: (*Any, **Any) -> Union[str, None] \"\"\"Executes the generateReport operation on", "def Start(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the start", "address. - LearnRate (number): Specifies the rate at which IxNetwork sends learn frames", "\"Software\"), # to deal in the Software without restriction, including without limitation #", "rate at which IxNetwork sends learn frames to the DUT. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnRate'])", "the server. 
Args ---- - FastPathEnable (bool): If true, enables fast path transmit.", "{ 'FastPathEnable': 'fastPathEnable', 'FastPathLearnFrameSize': 'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency',", "type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type: () ->", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "Returns ------- - number: Specifies the size of the learning frames in the", "(*Any, **Any) -> None \"\"\"Executes the apply operation on the server. Applies the", "# type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathRate'], value) @property def LearnFrameSize(self): # type: ()", "the same name while python does not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True", "'fastPathLearnFrameSize', 'FastPathNumFrames': 'fastPathNumFrames', 'FastPathRate': 'fastPathRate', 'LearnFrameSize': 'learnFrameSize', 'LearnFrequency': 'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate',", "- LearnSendRouterSolicitation (bool): Sends router solicitation messages. - LearnWaitTime (number): Specifies the length", "type: (*Any, **Any) -> None \"\"\"Executes the apply operation on the server. Applies", "self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property", "Returns ------- - number: Specifies the number of learn frames that IxNetwork sends", "onTrial): Allows to choose how frequently IxNetwork sends learning frames during the test.", "self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def LearnRate(self): # type: () -> int \"\"\" Returns -------", "'learnFrequency', 'LearnNumFrames': 'learnNumFrames', 'LearnRate': 'learnRate', 'LearnSendMacOnly': 'learnSendMacOnly', 'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit',", "LearnNumFrames(self): # type: () -> int \"\"\" Returns ------- - number: Specifies the", "ms that IxNetwork pauses before sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "has no return value. Raises ------ - NotFoundError: The requested resource does not", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "to be completed. waitForTest(async_operation=bool)list ------------------------------------- - async_operation (bool=False): True to execute the operation", "LearnSendMacOnly (bool): Sends learning frames to MAC address only. - LearnSendRouterSolicitation (bool): Sends", "of learn frames that IxNetwork sends through fast path. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames']) @FastPathNumFrames.setter", "condition \"\"\" payload = { \"Arg1\": self.href } for i in range(len(args)): payload['Arg%s'", "@property def LearnFrequency(self): # type: () -> str \"\"\" Returns ------- - str(never", "= item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any,", "type: () -> bool \"\"\" Returns ------- - bool: Sends learning frames to", "sends learning frames during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value):", "the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int)", "\"\"\" Returns ------- - bool: If true, enables fast path transmit. \"\"\" return", "Allows to choose how frequently IxNetwork sends learning frames during the test. -", "payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes", "name while python does not. start(async_operation=bool) --------------------------- - async_operation (bool=False): True to execute", "and returns the result of the test. Raises ------ - NotFoundError: The requested", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "IxNetwork sends learn frames through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathRate']) @FastPathRate.setter def FastPathRate(self,", "\"\"\" Returns ------- - number: Specifies the size of the learning frames. \"\"\"", "+ 2)] = args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyITWizardConfiguration',", "-> int \"\"\" Returns ------- - number: Specifies the rate at which IxNetwork", "the test. The LearnFrames class encapsulates a required learnFrames resource which will be", "1997 - 2020 by IXIA Keysight # # Permission is hereby granted, free", "specified Quick Test. The IxNetwork model allows for multiple method Signatures with the", "self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) -> Union[bool,", "Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type:", "Connection class will block until the operation is complete. - Returns bool: Raises", "------- - number: Specifies the size of the learning frames in the fast", "# # The above copyright notice and this permission notice shall be included", "return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs): # type: (*Any, **Any) ->", "This method is synchronous and returns the result of the test. Raises ------", "- number: Specifies the rate at which IxNetwork sends learn frames through fast", "['never', 'onBinaryIteration', 'oncePerFramesize', 'oncePerTest', 'onTrial'], } def __init__(self, parent, list_op=False): super(LearnFrames, self).__init__(parent, list_op)", "before sending all the \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTimeBeforeTransmit']) @LearnWaitTimeBeforeTransmit.setter def LearnWaitTimeBeforeTransmit(self, value): # type:", "the size of the learning frames. 
- LearnFrequency (str(never | onBinaryIteration | oncePerFramesize", "**kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the applyITWizardConfiguration operation on the", "- Returns list(str): This method is synchronous and returns the result of the", "str, int, int, bool, bool, int, int) -> LearnFrames \"\"\"Updates learnFrames resource on", "of the test. Raises ------ - NotFoundError: The requested resource does not exist", "uhd_restpy.base import Base from uhd_restpy.files import Files from typing import List, Any, Union", "args[i] for item in kwargs.items(): payload[item[0]] = item[1] return self._execute('start', payload=payload, response_object=None) def", "@LearnFrameSize.setter def LearnFrameSize(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrameSize'], value) @property def", "- Returns bool: Raises ------ - NotFoundError: The requested resource does not exist", "fast path. - FastPathRate (number): Specifies the rate at which IxNetwork sends learn", "None \"\"\"Executes the applyITWizardConfiguration operation on the server. Applies the specified Quick Test.", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "frames that IxNetwork sends during the test. The LearnFrames class encapsulates a required", "self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: () -> int \"\"\" Returns -------", "uhd_restpy.files import Files from typing import List, Any, Union class LearnFrames(Base): \"\"\"The learning", "MIT LICENSE # # Copyright 1997 - 2020 by IXIA Keysight # #", "-> bool \"\"\" Returns ------- - bool: Sends router solicitation messages. \"\"\" return", "LearnFrameSize (number): Specifies the size of the learning frames. - LearnFrequency (str(never |", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('applyAsync', payload=payload, response_object=None) def ApplyAsyncResult(self, *args, **kwargs):", "is complete. - Returns list(str): This method is synchronous and returns the result", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property def LearnWaitTime(self): # type: () -> int \"\"\" Returns -------", "the rate at which IxNetwork sends learn frames through fast path. - LearnFrameSize", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously.", "all the learning frames from all the ports. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnWaitTime']) @LearnWaitTime.setter def", "the test. run(InputParameters=string, async_operation=bool)list ----------------------------------------------------- - InputParameters (str): The input arguments of the", "and/or sell copies of the Software, and to permit persons to whom the", "input arguments of the test. 
- async_operation (bool=False): True to execute the operation", "a required learnFrames resource which will be retrieved from the server every time", "self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation'], value) @property", "copy # of this software and associated documentation files (the \"Software\"), # to", "bool: Sends router solicitation messages. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnSendRouterSolicitation']) @LearnSendRouterSolicitation.setter def LearnSendRouterSolicitation(self, value): #", "on the server. Stops the currently running Quick Test. stop(async_operation=bool) -------------------------- - async_operation", "self._get_attribute(self._SDM_ATT_MAP['LearnSendMacOnly']) @LearnSendMacOnly.setter def LearnSendMacOnly(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['LearnSendMacOnly'], value) @property", "portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "*args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the start operation on", "do so, subject to the following conditions: # # The above copyright notice", "method is synchronous and returns the result of the test. run(InputParameters=string, async_operation=bool)list -----------------------------------------------------", "@LearnWaitTime.setter def LearnWaitTime(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def", "type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value) @property def LearnNumFrames(self): # type: () ->", "return value. Raises ------ - NotFoundError: The requested resource does not exist on", "and has no return value. Raises ------ - NotFoundError: The requested resource does", "in kwargs.items(): payload[item[0]] = item[1] return self._execute('apply', payload=payload, response_object=None) def ApplyAsync(self, *args, **kwargs):", "operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation (bool=False): True to execute the", "(int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnWaitTime'], value) @property def LearnWaitTimeBeforeTransmit(self): # type: () -> int", "int \"\"\" Returns ------- - number: Specifies the number of learn frames that", "to choose how frequently IxNetwork sends learning frames during the test. - LearnNumFrames", "return self._execute('start', payload=payload, response_object=None) def Stop(self, *args, **kwargs): # type: (*Any, **Any) ->", "= item[1] return self._execute('run', payload=payload, response_object=None) def Start(self, *args, **kwargs): # type: (*Any,", "response_object=None) def Stop(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the", "- FastPathNumFrames (number): Specifies the number of learn frames that IxNetwork sends through", "run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute the operation asynchronously. Any subsequent", "Union[bool, None] \"\"\"Executes the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation", "address. 
\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnNumFrames']) @LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int) -> None", "# type: () -> int \"\"\" Returns ------- - number: Specifies the length", "name while python does not. run(async_operation=bool)list ----------------------------- - async_operation (bool=False): True to execute", "operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True to execute the", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'],", "async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls", "of the Software, and to permit persons to whom the # Software is", "\"\"\"Executes the applyAsyncResult operation on the server. applyAsyncResult(async_operation=bool)bool ------------------------------------------ - async_operation (bool=False): True", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS", "learn frames through fast path. - LearnFrameSize (number): Specifies the size of the", "kwargs.items(): payload[item[0]] = item[1] return self._execute('stop', payload=payload, response_object=None) def WaitForTest(self, *args, **kwargs): #", "method Signatures with the same name while python does not. run(async_operation=bool)list ----------------------------- -", "number of learn frames that IxNetwork sends through fast path. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathNumFrames'])", "None] \"\"\"Executes the waitForTest operation on the server. Waits for the execution of", "the applyITWizardConfiguration operation on the server. Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------", "-> None \"\"\"Executes the applyAsync operation on the server. applyAsync(async_operation=bool) -------------------------------- - async_operation", "Specifies the rate at which IxNetwork sends learn frames through fast path. -", "Sends learning frames to MAC address only. - LearnSendRouterSolicitation (bool): Sends router solicitation", "NotFoundError: The requested resource does not exist on the server - ServerError: The", "\"\"\" return self._get_attribute(self._SDM_ATT_MAP['FastPathEnable']) @FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'],", "Returns ------- - number: Specifies the number of learning frames that IxNetwork sends", "@property def LearnFrameSize(self): # type: () -> int \"\"\" Returns ------- - number:", "OTHER DEALINGS IN # THE SOFTWARE. from uhd_restpy.base import Base from uhd_restpy.files import", "sends learn frames to the DUT. - LearnSendMacOnly (bool): Sends learning frames to", "operation on the server. Starts the specified Quick Test. The IxNetwork model allows", "without restriction, including without limitation # the rights to use, copy, modify, merge,", "operation on the server. Generate a PDF report for the last succesfull test", "list(str): This method is synchronous and returns the result of the test. 
run(InputParameters=string,", "@LearnNumFrames.setter def LearnNumFrames(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnNumFrames'], value) @property def", "# to deal in the Software without restriction, including without limitation # the", "# MIT LICENSE # # Copyright 1997 - 2020 by IXIA Keysight #", "Stop(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the stop operation", "(the \"Software\"), # to deal in the Software without restriction, including without limitation", "def Run(self, *args, **kwargs): # type: (*Any, **Any) -> Union[List[str], None] \"\"\"Executes the", "(number): Specifies the number of learning frames that IxNetwork sends for each address.", "test run. generateReport(async_operation=bool)string ------------------------------------------ - async_operation (bool=False): True to execute the operation asynchronously.", "return self._execute('applyAsyncResult', payload=payload, response_object=None) def ApplyITWizardConfiguration(self, *args, **kwargs): # type: (*Any, **Any) ->", "type: () -> bool \"\"\" Returns ------- - bool: If true, enables fast", "during the test. The LearnFrames class encapsulates a required learnFrames resource which will", "for multiple method Signatures with the same name while python does not. run(async_operation=bool)list", "to deal in the Software without restriction, including without limitation # the rights", "'LearnSendRouterSolicitation': 'learnSendRouterSolicitation', 'LearnWaitTime': 'learnWaitTime', 'LearnWaitTimeBeforeTransmit': 'learnWaitTimeBeforeTransmit', } _SDM_ENUM_MAP = { 'learnFrequency': ['never', 'onBinaryIteration',", "onBinaryIteration | oncePerFramesize | oncePerTest | onTrial): Allows to choose how frequently IxNetwork", "None \"\"\"Executes the apply operation on the server. Applies the specified Quick Test.", "@FastPathEnable.setter def FastPathEnable(self, value): # type: (bool) -> None self._set_attribute(self._SDM_ATT_MAP['FastPathEnable'], value) @property def", "how frequently IxNetwork sends learning frames during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter", "return self._get_attribute(self._SDM_ATT_MAP['LearnRate']) @LearnRate.setter def LearnRate(self, value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value)", "applyITWizardConfiguration operation on the server. Applies the specified Quick Test. applyITWizardConfiguration(async_operation=bool) ------------------------------------------------ -", "hereby granted, free of charge, to any person obtaining a copy # of", "test. Raises ------ - NotFoundError: The requested resource does not exist on the", "has encountered an uncategorized error condition \"\"\" payload = { \"Arg1\": self.href }", "requested resource does not exist on the server - ServerError: The server has", "response_object=None) def Start(self, *args, **kwargs): # type: (*Any, **Any) -> None \"\"\"Executes the", "Specifies the size of the learning frames. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrameSize']) @LearnFrameSize.setter def LearnFrameSize(self,", "return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency']) @LearnFrequency.setter def LearnFrequency(self, value): # type: (str) -> None self._set_attribute(self._SDM_ATT_MAP['LearnFrequency'], value)", "made through the Connection class will block until the operation is complete. Raises", "\"\"\"Executes the generateReport operation on the server. 
Generate a PDF report for the", "value): # type: (int) -> None self._set_attribute(self._SDM_ATT_MAP['LearnRate'], value) @property def LearnSendMacOnly(self): # type:", "choose how frequently IxNetwork sends learning frames during the test. \"\"\" return self._get_attribute(self._SDM_ATT_MAP['LearnFrequency'])", "LearnRate (number): Specifies the rate at which IxNetwork sends learn frames to the" ]
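The fragment above covers the uhd_restpy quick-test LearnFrames resource: its learn-frame settings (LearnFrequency, LearnNumFrames, LearnRate, LearnSendMacOnly, FastPathEnable, and so on) and its test-control helpers (Apply, ApplyAsync, Run, Start, Stop, WaitForTest, GenerateReport). As a rough illustration only, assuming a learn_frames object has already been retrieved from an IxNetwork session (how to navigate to that node is not part of the fragment, and the numeric values are arbitrary examples):

# Illustrative sketch, not taken from the fragment: drive the documented learn-frame settings.
learn_frames.LearnFrequency = 'oncePerTest'   # never | onBinaryIteration | oncePerFramesize | oncePerTest | onTrial
learn_frames.LearnNumFrames = 10              # learning frames sent for each address
learn_frames.LearnRate = 100                  # rate at which learn frames are sent to the DUT
learn_frames.FastPathEnable = True            # also send learn frames through the fast path
learn_frames.Apply()                          # apply the specified Quick Test configuration
learn_frames.Start()                          # start the Quick Test (Stop() and Run() are also exposed)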
[ "from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset fields =", "import serializers from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset", "from rest_framework import serializers from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model", "core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset fields = '__all__'", "rest_framework import serializers from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model =", "serializers from core import models class AssetSerializer(serializers.ModelSerializer): class Meta: model = models.Asset fields" ]
[ "a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\"", "#count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2), c(2), d(2,2,1), okay(2), p(1)} \"\"\"", "\"\"\" c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)}", "c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\"", "<filename>tests/wasp1/AllAnswerSets/aggregates_count_boundvariables_1.test.py input = \"\"\" c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1.", "= 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2), c(2),", "#count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2),", "d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output", "1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2), c(2), d(2,2,1),", "input = \"\"\" c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):-", "p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1.", "c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} =", "p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2), c(2), d(2,2,1), okay(2), p(1)}", "= \"\"\" c(2). p(1). a(2). d(2,2,1). okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X),", "okay(X):- c(X), #count{V:a(V),d(V,X,1)} = 1. ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output =", "ouch(X):- p(X), #count{V:a(V),d(V,X,1)} = 1. \"\"\" output = \"\"\" {a(2), c(2), d(2,2,1), okay(2)," ]
[ "+ bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI", "import hmac import random, sys import hashlib import binascii import urllib bot =", "urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" + toLang + \"&salt=\" + str(salt)", "import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session:", "if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"]", "sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status != 200: pass", "== \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to", "CommandSession, on_command from langdetect import detect, detect_langs from aiohttp import ClientSession from nonebot", "+ ans) @wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser", "session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种 salt =", "except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session: CommandSession): arg", "resp.status != 200: pass ShitAns = await resp.json() try: ans = [i[\"dst\"] for", "\")) arg = \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg)", "if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else", "ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\")", "nonebot.argparse import ArgumentParser import time import hmac import random, sys import hashlib import", "def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang =", "\"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2]", "pass ShitAns = await resp.json() try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]]", "wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"]", "session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] =", "import get_bot from nonebot.argparse import ArgumentParser import time import hmac import random, sys", "+ sign ) async with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl)", "import time import hmac import random, sys import hashlib import binascii import urllib", "async def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\",", "async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status != 200: pass ShitAns", "+ \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang +", "200: pass ShitAns = await resp.json() try: ans = [i[\"dst\"] for i in", "import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: 
CommandSession): session.get(\"token\",", "as resp: if resp.status != 200: pass ShitAns = await resp.json() try: ans", "== \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to", "= detect(arg) if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"]", "<filename>Pzzzzz/plugins/wm.py from nonebot import CommandSession, on_command from langdetect import detect, detect_langs from aiohttp", "langdetect import detect, detect_langs from aiohttp import ClientSession from nonebot import get_bot from", "# 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"},", "toLang + \"&salt=\" + str(salt) + \"&sign=\" + sign ) async with ClientSession()", "ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" +", "parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] =", "session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\":", "import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl", "str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" +", "session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session: CommandSession): arg =", "\"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to ==", "salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey", "CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\")", "random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl =", "== \"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if session.state[\"fr\"] == \"zh\":", "= ( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\"", "parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg", "coding=utf-8 import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def", "= ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\",", "session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run:", "\" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr ==", "\"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if argv.fr == \"no\":", "arg = \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if", "+ ShitAns[\"error_code\"]) 
session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip()", "ans) @wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser =", "import CommandSession, on_command from langdetect import detect, detect_langs from aiohttp import ClientSession from", "session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"] =", "detect, detect_langs from aiohttp import ClientSession from nonebot import get_bot from nonebot.argparse import", "ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status !=", "_(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str,", "bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI +", "prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang", "import random, sys import hashlib import binascii import urllib bot = get_bot() #", "fromLang + \"&to=\" + toLang + \"&salt=\" + str(salt) + \"&sign=\" + sign", "hmac import random, sys import hashlib import binascii import urllib bot = get_bot()", "session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] ==", "CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] #", "\"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"]", "= session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种 salt", "= session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\",", "+ myurl) as resp: if resp.status != 200: pass ShitAns = await resp.json()", "with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status", "random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign =", "+ q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl", "q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种", "+ toLang + \"&salt=\" + str(salt) + \"&sign=\" + sign ) async with", "arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\",", "argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\")", "\"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] #", "argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if", "hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession):", "str(salt) + \"&sign=\" + sign ) async with ClientSession() as sess: 
async with", "+ \"&to=\" + toLang + \"&salt=\" + str(salt) + \"&sign=\" + sign )", "= session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536)", "# 译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q + str(salt)", "from nonebot import CommandSession, on_command from langdetect import detect, detect_langs from aiohttp import", "!= 200: pass ShitAns = await resp.json() try: ans = [i[\"dst\"] for i", "parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if", "session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\",", "= session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q", "for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" +", "from nonebot import get_bot from nonebot.argparse import ArgumentParser import time import hmac import", "= get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import random @on_command(\"wm\",", "random, sys import hashlib import binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL>", "+ \"&sign=\" + sign ) async with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\"", "only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"]", "sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status != 200: pass ShitAns = await", "def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\",", "q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl +", "import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib", "urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import", "session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str,", "if argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"]", "type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \"", "+ urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" + toLang + \"&salt=\" +", "@on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\"", "sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\"", "65536) sign = bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest()", "bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import random", "= 
\"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session:", "fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768,", "百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False)", "sys import hashlib import binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> #", "\"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if", "\"&salt=\" + str(salt) + \"&sign=\" + sign ) async with ClientSession() as sess:", "+ str(salt) + \"&sign=\" + sign ) async with ClientSession() as sess: async", "type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg", "try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\"", "\"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv =", "= [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"])", "ShitAns = await resp.json() try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans", "\"&to=\" + toLang + \"&salt=\" + str(salt) + \"&sign=\" + sign ) async", "async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang", "\"&sign=\" + sign ) async with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" +", "+ \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" + toLang +", "as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status != 200:", "detect(arg) if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] =", "bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = (", "await resp.json() try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans)", "time import hmac import random, sys import hashlib import binascii import urllib bot", "session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign", "aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q", "session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] =", "+ \"&from=\" + fromLang + \"&to=\" + toLang + \"&salt=\" + str(salt) +", "else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\":", "aiohttp import ClientSession from nonebot import get_bot from nonebot.argparse import ArgumentParser import time", "myurl) as resp: if resp.status != 200: pass ShitAns = await resp.json() try:", "session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"] = \"auto\" session.state[\"token\"] = arg", "import binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib", "ArgumentParser import time 
import hmac import random, sys import hashlib import binascii import", "= random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign", "myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang", "# coding=utf-8 import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async", "# 原文语种 toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign =", "session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if argv.fr", "toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI +", "\"zh\" else: session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"] = \"auto\" session.state[\"token\"]", "async with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if", "原文语种 toLang = session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI", "[i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\"", "from aiohttp import ClientSession from nonebot import get_bot from nonebot.argparse import ArgumentParser import", "ClientSession from nonebot import get_bot from nonebot.argparse import ArgumentParser import time import hmac", "= hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" +", "session.state[\"to\"] # 译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q +", "\"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\"", "on_command from langdetect import detect, detect_langs from aiohttp import ClientSession from nonebot import", "from langdetect import detect, detect_langs from aiohttp import ClientSession from nonebot import get_bot", "detect_langs from aiohttp import ClientSession from nonebot import get_bot from nonebot.argparse import ArgumentParser", "\"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def _(session: CommandSession):", "resp.json() try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except:", "else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"]", "import ClientSession from nonebot import get_bot from nonebot.argparse import ArgumentParser import time import", "resp: if resp.status != 200: pass ShitAns = await resp.json() try: ans =", "sign = bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl", "nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg == \"\":", "if session.is_first_run: parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\")", "from nonebot.argparse import ArgumentParser import time import hmac import random, sys import hashlib", "parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv", "myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + 
urllib.parse.quote(q) +", "default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token)", "if argv.fr == \"no\" else argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\"", "\"&from=\" + fromLang + \"&to=\" + toLang + \"&salt=\" + str(salt) + \"&sign=\"", "@wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip() if session.is_first_run: parser = ArgumentParser(session=session)", "== \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else argv.fr if", "\".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\"", "get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import urllib import random @on_command(\"wm\", aliases={\"翻译\",", "hashlib import binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import", "= await resp.json() try: ans = [i[\"dst\"] for i in ShitAns[\"trans_result\"]] ans =", "get_bot from nonebot.argparse import ArgumentParser import time import hmac import random, sys import", "+ bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" +", "binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8 import hashlib import", "default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg ==", "译文语种 salt = random.randint(32768, 65536) sign = bot.config.BAIDUAPI + q + str(salt) +", "import ArgumentParser import time import hmac import random, sys import hashlib import binascii", "= bot.config.BAIDUAPI + q + str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl =", "import hashlib import binascii import urllib bot = get_bot() # 百度通用翻译API,不包含词典、tts语音合成等资源,如有相关需求请联系<EMAIL> # coding=utf-8", "arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr == \"no\" else argv.fr", "argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] =", "\"zh\" if argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else:", "\"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \")) arg =", "== \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\"", "if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if session.state[\"fr\"]", "ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async def", "urllib import random @on_command(\"wm\", aliases={\"翻译\", \"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\")", "argv.fr if session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if", "( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" +", "i in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans)", "ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) 
@wm.args_parser async def _(session: CommandSession): arg = session.current_arg_text.strip() if", "\"translate\"}, only_to_me=False) async def wm(session: CommandSession): session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q =", "myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang =", "hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\" + bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q)", ") async with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp:", "+ \"&salt=\" + str(salt) + \"&sign=\" + sign ) async with ClientSession() as", "sign ) async with ClientSession() as sess: async with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as", "nonebot import get_bot from nonebot.argparse import ArgumentParser import time import hmac import random,", "parser = ArgumentParser(session=session) parser.add_argument(\"--fr\", \"-f\", type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str,", "with sess.get(\"https://fanyi-api.baidu.com\" + myurl) as resp: if resp.status != 200: pass ShitAns =", "import detect, detect_langs from aiohttp import ClientSession from nonebot import get_bot from nonebot.argparse", "ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser async", "= parser.parse_args(session.current_arg.split(\" \")) arg = \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"]", "+ str(salt) + bot.config.BAIDUKey sign = hashlib.md5(sign.encode()).hexdigest() myurl = ( myurl + \"?appid=\"", "= \"zh\" if argv.to == \"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\"", "= \"zh\" else: session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"] = \"auto\"", "+ fromLang + \"&to=\" + toLang + \"&salt=\" + str(salt) + \"&sign=\" +", "in ShitAns[\"trans_result\"]] ans = \"\\n\".join(ans) except: session.finish(\"翻译错误,原因是:\" + ShitAns[\"error_code\"]) session.finish(\"翻译结果为:\\n\" + ans) @wm.args_parser", "default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") argv = parser.parse_args(session.current_arg.split(\" \"))", "session.state[\"fr\"][:2] == \"zh\": session.state[\"fr\"] = \"zh\" if argv.to == \"no\": if session.state[\"fr\"] ==", "= \"en\" else: session.state[\"to\"] = \"zh\" else: session.state[\"to\"] = argv.to if argv.fr ==", "else: session.state[\"to\"] = argv.to if argv.fr == \"no\": session.state[\"fr\"] = \"auto\" session.state[\"token\"] =", "= \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种 toLang = session.state[\"to\"]", "= \" \".join(argv.token) if arg == \"\": session.pause(\"输入不能为空哦!\") session.state[\"fr\"] = detect(arg) if argv.fr", "bot.config.BAIDUAPI + \"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" + toLang", "if resp.status != 200: pass ShitAns = await resp.json() try: ans = [i[\"dst\"]", "session.get(\"token\", prompt=\"请输入你想翻译的句子!\") myurl = \"/api/trans/vip/translate\" q = session.state[\"token\"] fromLang = session.state[\"fr\"] # 原文语种", "type=str, default=\"no\") parser.add_argument(\"--to\", \"-t\", type=str, default=\"no\") parser.add_argument(\"token\", type=str, default=\"\", nargs=\"+\") 
argv = parser.parse_args(session.current_arg.split(\"", "nonebot import CommandSession, on_command from langdetect import detect, detect_langs from aiohttp import ClientSession", "\"no\": if session.state[\"fr\"] == \"zh\": session.state[\"to\"] = \"en\" else: session.state[\"to\"] = \"zh\" else:", "\"&q=\" + urllib.parse.quote(q) + \"&from=\" + fromLang + \"&to=\" + toLang + \"&salt=\"" ]
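The request built in the handler follows the Baidu translate signing scheme visible above: sign = MD5(appid + query + salt + key), appended to the query string. A minimal standalone sketch of that step, with placeholder credentials (the appid and key arguments are assumptions, not values from the plugin):

# Build a signed /api/trans/vip/translate query path, mirroring the handler above.
import hashlib
import random
import urllib.parse

def baidu_sign_url(q, appid, key, from_lang="auto", to_lang="en"):
    salt = random.randint(32768, 65536)
    sign = hashlib.md5((appid + q + str(salt) + key).encode()).hexdigest()
    return (
        "/api/trans/vip/translate"
        + "?appid=" + appid
        + "&q=" + urllib.parse.quote(q)
        + "&from=" + from_lang
        + "&to=" + to_lang
        + "&salt=" + str(salt)
        + "&sign=" + sign
    )

# Example: GET "https://fanyi-api.baidu.com" + baidu_sign_url("hello", APPID, KEY, "en", "zh")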
[ "lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p,", "assert self.lp != 0, \"Can't construct a new LpSolve model\" self.colbuff = (c_int", "new LpSolve model\" self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double * maxvars)()", "self.numbers = numbers self.vars = vars self.optype = None self.rhs = None def", "cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't add constraint", "platform.\") # Make the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes =", "# Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__)", "0 or ret == 1: self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\"", "for i, var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve the model.\"", "constraint(self, const): \"Add a new constraint into the model.\" assert const.optype is not", "type of the variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype ==", "__le__(self, val): self.optype, self.rhs = (1, val) return self def __eq__(self, val): self.optype,", "self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change the", "maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint", "construct a new LpSolve model\" self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double", "self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1,", "lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p,", "c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes", "c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes =", "lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p,", "elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't", "sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\":", "path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform", "= [] self.lp = lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct a", "for calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index", "assert const.optype is not None, \"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff,", "c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes =", "into the model.\" assert const.optype is not None, \"You must provide the RHS", "platform # Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here =", "c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] 
lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype", "\"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num): return", "c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some", "self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const):", "= \"real\" self.retype(vtype) def retype(self, vtype): \"Change the type of the variable\" if", "constraint.\" def __init__(self, numbers, vars): self.numbers = numbers self.vars = vars self.optype =", "(C) 2018, <NAME> License MIT \"\"\" from ctypes import * import sys import", "1, \"Can't get variable values\" for i, var in enumerate(self.vars): var.value = self.rowbuff[i]", "2018, <NAME> License MIT \"\"\" from ctypes import * import sys import os.path", "\"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here,", "ret == 1: self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self,", "lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve library on", "def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars = maxvars self.vars = []", "= None self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype):", "this platform.\") # Make the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes", "def __le__(self, val): self.optype, self.rhs = (1, val) return self def __eq__(self, val):", "LpSolve wrapper. 
Copyright (C) 2018, <NAME> License MIT \"\"\" from ctypes import *", "= POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes", "variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get", "fill_buffers(self, colno, row): \"Fill colno and row buffers for calling LpSolve.\" for i,", "lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint into", "const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret ==", "is not None, \"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret", "c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p]", "= [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def", "lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes =", "lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte", "= c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte", "len(c.vars) == len(set(c.vars)), \"Some variables appear several times\" return c def __le__(self, val):", "= windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\"", "% ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else:", "ctypes import * import sys import os.path as path import platform # Import", "other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear several times\"", "c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set objective function of model\"", "self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp,", "self.debug = debug self.maxvars = maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars)", "self.vars + [other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert", "wrapper. 
Copyright (C) 2018, <NAME> License MIT \"\"\" from ctypes import * import", "self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert", "= cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve library on this", "__init__(self, numbers, vars): self.numbers = numbers self.vars = vars self.optype = None self.rhs", "\"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1,", "self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret == 0", "in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in (self.type, vtype):", "new constraint into the model.\" assert const.optype is not None, \"You must provide", "objective function of model\" def update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp,", "* maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def", "from ctypes import * import sys import os.path as path import platform #", "== \"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def", "self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change the type of the variable\"", "LpSolve library on this platform.\") # Make the bindings c_double_p = POINTER(c_double) c_int_p", "lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p,", "in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num],", "lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False):", "= (c_int * maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self):", "other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self,", "model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if", "= LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables", "on this platform.\") # Make the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int)", "if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform ==", "const.optype, const.rhs) assert ret == 1, \"Can't add constraint into model\" def objective(self,", "ret = lib.solve(self.lp) if ret == 0 or ret == 1: self.update_variables() return", "c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes", "self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret", "len(lp.vars) + 1 self.value = None self.lp = lp lp.vars.append(self) self.type = \"real\"", "\"Can't add a variable: \" self.index = len(lp.vars) + 1 self.value = None", "\"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == 
\"int\")) def __rmul__(self,", "ret == 1, \"Can't add constraint into model\" def objective(self, const): \"Set the", "\"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars =", "__add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other]) class", "values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get variable", "self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def", "[1], self.vars + [other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars)", "self.lp = lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct a new LpSolve", "License MIT \"\"\" from ctypes import * import sys import os.path as path", "\"Add a new constraint into the model.\" assert const.optype is not None, \"You", "\"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff,", "!= 0, \"Can't construct a new LpSolve model\" self.colbuff = (c_int * maxvars)()", "assert ret == 1, \"Can't set objective function of model\" def update_variables(self): \"Update", "__eq__(self, val): self.optype, self.rhs = (3, val) return self def __ge__(self, val): self.optype,", "__init__(self, maxvars, debug=False): self.debug = debug self.maxvars = maxvars self.vars = [] self.lp", "= POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes", "c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes", "lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype =", "or ret == 1: self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\" def", "\"Fill colno and row buffers for calling LpSolve.\" for i, (num, var) in", "lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change the type of the", "the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform", "the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype", "\"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint):", "= lib.solve(self.lp) if ret == 0 or ret == 1: self.update_variables() return ret", "[other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars) ==", "\"Can't set objective function of model\" def update_variables(self): \"Update the variable values.\" ret", "ValueError(\"Can't load LpSolve library on this platform.\") # Make the bindings c_double_p =", "Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if", "c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p,", "lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change the type of", "Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars = maxvars", "(vtype == \"bin\")) elif 
\"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\"))", "+ other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear several", "\"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers = numbers self.vars = vars", "assert ret == 1, \"Can't get variable values\" for i, var in enumerate(self.vars):", "= c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes =", "elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num):", "self def __eq__(self, val): self.optype, self.rhs = (3, val) return self def __ge__(self,", "(self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num], [self])", "a variable: \" self.index = len(lp.vars) + 1 self.value = None self.lp =", "== \"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other,", "vtype): \"Change the type of the variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp,", "lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int", "c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes =", "lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte]", "\"Change the type of the variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index,", "row buffers for calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i]", "the model.\" assert const.optype is not None, \"You must provide the RHS of", "LpConstraint(self.numbers + [1], self.vars + [other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars", "[c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object):", "the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff,", "len(set(c.vars)), \"Some variables appear several times\" return c def __le__(self, val): self.optype, self.rhs", "objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff,", "def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3)", "assert len(lp.vars) < lp.maxvars, \"Can't add a variable: \" self.index = len(lp.vars) +", "other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers = numbers", "def update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret", "[c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype =", "return self def __eq__(self, val): self.optype, self.rhs = (3, val) return self def", "LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other]) else: c = 
LpConstraint(self.numbers +", "class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug = debug", "1: self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"):", "= c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug", "c def __le__(self, val): self.optype, self.rhs = (1, val) return self def __eq__(self,", "colno and row buffers for calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers,", "= [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p,", "enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num def __add__(self, other): if isinstance(other,", "raise ValueError(\"Can't load LpSolve library on this platform.\") # Make the bindings c_double_p", "c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes", "lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint into the model.\" assert const.optype", "class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars,", "of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype,", "vars self.optype = None self.rhs = None def fill_buffers(self, colno, row): \"Fill colno", "lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes =", "cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set objective function of model\" def", "= c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear", "<NAME> License MIT \"\"\" from ctypes import * import sys import os.path as", "LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars):", "\"\"\" from ctypes import * import sys import os.path as path import platform", "other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other]) else: c", "= num def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars", "__add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other]) else:", "class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers = numbers self.vars", "debug=False): self.debug = debug self.maxvars = maxvars self.vars = [] self.lp = lib.make_lp(0,", "isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve", "self.vars = [] self.lp = lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct", "return other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def", "bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype =", "lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret == 0 or", "colno, row): \"Fill colno and row buffers for 
calling LpSolve.\" for i, (num,", "self.rhs = (1, val) return self def __eq__(self, val): self.optype, self.rhs = (3,", "lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype =", "\"\"\" LpSolve wrapper. Copyright (C) 2018, <NAME> License MIT \"\"\" from ctypes import", "= self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else:", "\" self.index = len(lp.vars) + 1 self.value = None self.lp = lp lp.vars.append(self)", "get variable values\" for i, var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self):", "assert ret == 1, \"Can't add constraint into model\" def objective(self, const): \"Set", "LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a", "= lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1,", "= [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p]", "lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes =", "const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert", "constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs)", "< lp.maxvars, \"Can't add a variable: \" self.index = len(lp.vars) + 1 self.value", "lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p,", "values\" for i, var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve the", "(self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp,", "lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't", "+ other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear several times\" return c", "of model\" def update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p))", "maxvars) assert self.lp != 0, \"Can't construct a new LpSolve model\" self.colbuff =", "c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte", "colno[i] = var.index row[i] = num def __add__(self, other): if isinstance(other, LpVariable): return", "self.maxvars = maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars) assert self.lp !=", "[c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self,", "debug self.maxvars = maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars) assert self.lp", "\"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve library", "i, var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve 
the model.\" lib.set_maxim(self.lp)", "if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret ==", "return LpConstraint(self.numbers + [1], self.vars + [other]) else: c = LpConstraint(self.numbers + other.numbers,", "= c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes", "a new constraint into the model.\" assert const.optype is not None, \"You must", "ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't", "\"Some variables appear several times\" return c def __le__(self, val): self.optype, self.rhs =", "= None self.rhs = None def fill_buffers(self, colno, row): \"Fill colno and row", "1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers", "* import sys import os.path as path import platform # Import the DLL", "= maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars) assert self.lp != 0,", "buffers for calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] =", "c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype =", "self.index = len(lp.vars) + 1 self.value = None self.lp = lp lp.vars.append(self) self.type", "variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a variable:", "* maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new", "self.rhs = (3, val) return self def __ge__(self, val): self.optype, self.rhs = (2,", "self.optype, self.rhs = (1, val) return self def __eq__(self, val): self.optype, self.rhs =", "1, \"Can't set objective function of model\" def update_variables(self): \"Update the variable values.\"", "def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a variable: \"", "os.path as path import platform # Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize", "== 1, \"Can't add constraint into model\" def objective(self, const): \"Set the objective", "len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set objective function", "variable values\" for i, var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve", "return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1,", "(3, val) return self def __ge__(self, val): self.optype, self.rhs = (2, val) return", "= var.index row[i] = num def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers", "DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform ==", "def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint into the model.\"", "vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index,", "c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype =", "if ret == 0 or ret == 1: self.update_variables() return ret class LpVariable(object):", "LpSolve constraint.\" def __init__(self, numbers, vars): 
self.numbers = numbers self.vars = vars self.optype", "(\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform == \"win32\": lib =", "c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p,", "[] self.lp = lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct a new", "\"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in (self.type,", "LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] =", "\"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add", "<filename>home/scripts/memory/lpsolve.py \"\"\" LpSolve wrapper. Copyright (C) 2018, <NAME> License MIT \"\"\" from ctypes", "objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp,", "lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct a new LpSolve model\" self.colbuff", "row[i] = num def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1],", "[self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers =", "times\" return c def __le__(self, val): self.optype, self.rhs = (1, val) return self", "val) return self def __ge__(self, val): self.optype, self.rhs = (2, val) return self", "var.index row[i] = num def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers +", "b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret == 0 or ret", "in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug:", "c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug =", "provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p),", "= (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform == \"win32\": lib", "ret == 0 or ret == 1: self.update_variables() return ret class LpVariable(object): \"A", "sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load", "set objective function of model\" def update_variables(self): \"Update the variable values.\" ret =", "(vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other): if", "and row buffers for calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)):", "None def fill_buffers(self, colno, row): \"Fill colno and row buffers for calling LpSolve.\"", "lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming Engine.\"", "LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars", "const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars),", "self.value = None self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self,", "the type of the variable\" if 
\"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype", "len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't add", "c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't add constraint into", "c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int,", "lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a variable: \" self.index =", "the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp)", "% ver))) else: raise ValueError(\"Can't load LpSolve library on this platform.\") # Make", "lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes", "num): return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return", "def retype(self, vtype): \"Change the type of the variable\" if \"bin\" in (self.type,", "windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" %", "None, \"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp,", "ver))) else: raise ValueError(\"Can't load LpSolve library on this platform.\") # Make the", "calling LpSolve.\" for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i]", "def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other])", "sys import os.path as path import platform # Import the DLL ver =", "vars): self.numbers = numbers self.vars = vars self.optype = None self.rhs = None", "\"Can't add constraint into model\" def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp,", "constraint into model\" def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff,", "= lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get variable values\" for", "enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp,", "as path import platform # Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize >", "= len(lp.vars) + 1 self.value = None self.lp = lp lp.vars.append(self) self.type =", "MIT \"\"\" from ctypes import * import sys import os.path as path import", "def __init__(self, numbers, vars): self.numbers = numbers self.vars = vars self.optype = None", "c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype", "not None, \"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret =", "row): \"Fill colno and row buffers for calling LpSolve.\" for i, (num, var)", "+ 1 self.value = None self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype)", "else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret == 0 or ret ==", "lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in (self.type, vtype): 
lib.set_binary(self.lp.lp, self.index, (vtype", "= c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte", "lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte]", "\"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret =", "None self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change", "add constraint into model\" def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0)", "None self.rhs = None def fill_buffers(self, colno, row): \"Fill colno and row buffers", "for i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num", "function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p))", "c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype =", "lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here,", "maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't", "import os.path as path import platform # Import the DLL ver = (\"x86\",", "const.rhs) assert ret == 1, \"Can't add constraint into model\" def objective(self, const):", "self.index, (vtype == \"bin\")) elif \"int\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype ==", "self.vars = vars self.optype = None self.rhs = None def fill_buffers(self, colno, row):", "(c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a", "return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers,", "= [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p,", "self.optype = None self.rhs = None def fill_buffers(self, colno, row): \"Fill colno and", "def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret =", "\"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve library on this platform.\") #", "(c_int * maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp)", "c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double]", "ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise", "solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret", "self.rhs = None def fill_buffers(self, colno, row): \"Fill colno and row buffers for", "other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear several times\" return c def", "0, \"Can't construct a new LpSolve model\" self.colbuff = (c_int * maxvars)() self.rowbuff", "other): if isinstance(other, 
LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object):", "c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype", "LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear", "== 1, \"Can't set objective function of model\" def update_variables(self): \"Update the variable", "Copyright (C) 2018, <NAME> License MIT \"\"\" from ctypes import * import sys", "path import platform # Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32]", "def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint): return", "RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p),", "lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get variable values\" for i,", "isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other]) else: c = LpConstraint(self.numbers", "\"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver)))", "= path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif", "__init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a variable: \" self.index", "lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte", "c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int, c_double_p, c_int_p]", "c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming Engine.\" def __init__(self, maxvars,", "if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars + [other]) else: c =", "== \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib", "+ [other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars)", "ret == 1, \"Can't get variable values\" for i, var in enumerate(self.vars): var.value", "[c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int,", "cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't add constraint into model\"", "val) return self def __eq__(self, val): self.optype, self.rhs = (3, val) return self", "into model\" def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff)", "i, (num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num def", "= [c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype", "= debug self.maxvars = maxvars self.vars = [] self.lp = lib.make_lp(0, maxvars) assert", "maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self,", "1, \"Can't add constraint into model\" def 
objective(self, const): \"Set the objective function.\"", "several times\" return c def __le__(self, val): self.optype, self.rhs = (1, val) return", "[self]) def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self,", "= c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p,", "+ [1], self.vars + [other]) else: c = LpConstraint(self.numbers + other.numbers, self.vars +", "= c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes", "== 1: self.update_variables() return ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp,", "val): self.optype, self.rhs = (1, val) return self def __eq__(self, val): self.optype, self.rhs", "var.value = self.rowbuff[i] def solve(self): \"Solve the model.\" lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\")", "0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret", "= c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes", "= (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add", "val): self.optype, self.rhs = (3, val) return self def __ge__(self, val): self.optype, self.rhs", "library on this platform.\") # Make the bindings c_double_p = POINTER(c_double) c_int_p =", "1) def __del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint into the", "cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get variable values\" for i, var", "[c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p, c_int,", "def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other])", "\"real\" self.retype(vtype) def retype(self, vtype): \"Change the type of the variable\" if \"bin\"", "lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret == 0 or ret == 1:", "c_int_p)) assert ret == 1, \"Can't set objective function of model\" def update_variables(self):", "= lib.make_lp(0, maxvars) assert self.lp != 0, \"Can't construct a new LpSolve model\"", "[c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype", "maxvars, debug=False): self.debug = debug self.maxvars = maxvars self.vars = [] self.lp =", "variable: \" self.index = len(lp.vars) + 1 self.value = None self.lp = lp", "LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't", "= lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set", "lib.solve(self.lp) if ret == 0 or ret == 1: self.update_variables() return ret class", "[c_void_p, c_int, c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes = [c_void_p, c_int,", "c_int_p), const.optype, const.rhs) assert ret == 1, \"Can't add constraint into model\" def", "var in enumerate(self.vars): var.value = self.rowbuff[i] def solve(self): \"Solve the model.\" 
lib.set_maxim(self.lp) if", "return c def __le__(self, val): self.optype, self.rhs = (1, val) return self def", "lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p]", "ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars) <", "LpSolve model\" self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp,", "const): \"Add a new constraint into the model.\" assert const.optype is not None,", "1 self.value = None self.lp = lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def", "self.lp != 0, \"Can't construct a new LpSolve model\" self.colbuff = (c_int *", "LpConstraint(object): \"A LpSolve constraint.\" def __init__(self, numbers, vars): self.numbers = numbers self.vars =", "variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\"", "= None def fill_buffers(self, colno, row): \"Fill colno and row buffers for calling", "if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif \"int\" in", "of the variable\" if \"bin\" in (self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\"))", "the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't", "model\" self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double * maxvars)() lib.set_add_rowmode(self.lp, 1)", "self.index, (vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other):", "= vars self.optype = None self.rhs = None def fill_buffers(self, colno, row): \"Fill", "Make the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int]", "# Make the bindings c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int,", "Programming Engine.\" def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars = maxvars self.vars", "__del__(self): lib.delete_lp(self.lp) def constraint(self, const): \"Add a new constraint into the model.\" assert", "model\" def update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert", "variables appear several times\" return c def __le__(self, val): self.optype, self.rhs = (1,", "retype(self, vtype): \"Change the type of the variable\" if \"bin\" in (self.type, vtype):", "the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p),", "c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype =", "ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p), const.optype, const.rhs) assert ret ==", "3) ret = lib.solve(self.lp) if ret == 0 or ret == 1: self.update_variables()", "LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A LpSolve constraint.\"", "ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here = path.dirname(__file__) if sys.platform == \"win32\":", "numbers self.vars = vars self.optype = None self.rhs = None def fill_buffers(self, colno,", "lib.add_constraintex.argtypes = [c_void_p, c_int, 
c_double_p, c_int_p, c_int, c_double] lib.add_constraintex.restype = c_ubyte lib.set_obj_fnex.argtypes =", "load LpSolve library on this platform.\") # Make the bindings c_double_p = POINTER(c_double)", "numbers, vars): self.numbers = numbers self.vars = vars self.optype = None self.rhs =", "self.optype, self.rhs = (3, val) return self def __ge__(self, val): self.optype, self.rhs =", "= lp lp.vars.append(self) self.type = \"real\" self.retype(vtype) def retype(self, vtype): \"Change the type", "ret == 1, \"Can't set objective function of model\" def update_variables(self): \"Update the", "self.retype(vtype) def retype(self, vtype): \"Change the type of the variable\" if \"bin\" in", "c_double_p)) assert ret == 1, \"Can't get variable values\" for i, var in", "must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars), cast(self.rowbuff,", "LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1],", "= [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class", "[c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes = [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype", "= (3, val) return self def __ge__(self, val): self.optype, self.rhs = (2, val)", "\"Can't get variable values\" for i, var in enumerate(self.vars): var.value = self.rowbuff[i] def", "= [c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int]", "return ret class LpVariable(object): \"A LpSolve variable.\" def __init__(self, lp, vtype=\"real\"): assert len(lp.vars)", "POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes =", "import * import sys import os.path as path import platform # Import the", "c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte", "c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte", "def constraint(self, const): \"Add a new constraint into the model.\" assert const.optype is", "cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set objective function of", "== 1, \"Can't get variable values\" for i, var in enumerate(self.vars): var.value =", "c_double_p = POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p", "[c_void_p] lib.write_lp.argtypes = [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes", "[c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int, c_ubyte]", "= (1, val) return self def __eq__(self, val): self.optype, self.rhs = (3, val)", "\"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver))) elif sys.platform == \"linux\": lib =", "lp.maxvars, \"Can't add a variable: \" self.index = len(lp.vars) + 1 self.value =", "= numbers self.vars = vars self.optype = None self.rhs = None def fill_buffers(self,", "add a variable: \" self.index = len(lp.vars) + 1 self.value = None self.lp", "the variable\" if \"bin\" in 
(self.type, vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"bin\")) elif", "lib.set_maxim(self.lp) if self.debug: lib.write_lp(self.lp, b\"debug-model.lp\") else: lib.set_verbose(self.lp, 3) ret = lib.solve(self.lp) if ret", "= [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes = [c_void_p] lib.set_binary.argtypes = [c_void_p, c_int,", "__rmul__(self, num): return LpConstraint([num], [self]) def __add__(self, other): if isinstance(other, LpConstraint): return other.__add__(self)", "Engine.\" def __init__(self, maxvars, debug=False): self.debug = debug self.maxvars = maxvars self.vars =", "= [c_void_p, c_int, c_double_p, c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype", "> 2**32] here = path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\"", "lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def __add__(self,", "[c_void_p, c_int, c_ubyte] lib.set_binary.restype = c_ubyte lib.set_int.argtypes = [c_void_p, c_int, c_ubyte] lib.set_int.restype =", "POINTER(c_double) c_int_p = POINTER(c_int) lib.make_lp.argtypes = [c_int, c_int] lib.make_lp.restype = c_void_p lib.delete_lp.argtypes =", "len(lp.vars) < lp.maxvars, \"Can't add a variable: \" self.index = len(lp.vars) + 1", "var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num def __add__(self, other):", "def __eq__(self, val): self.optype, self.rhs = (3, val) return self def __ge__(self, val):", "else: c = LpConstraint(self.numbers + other.numbers, self.vars + other.vars) assert len(c.vars) == len(set(c.vars)),", "== 0 or ret == 1: self.update_variables() return ret class LpVariable(object): \"A LpSolve", "if isinstance(other, LpConstraint): return other.__add__(self) return LpConstraint([1, 1], [self, other]) class LpConstraint(object): \"A", "= [c_void_p, c_char_p] lib.write_lp.restype = c_ubyte lib.set_verbose.argtypes = [c_void_p, c_int] lib.solve.argtypes = [c_void_p]", "(num, var) in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num def __add__(self,", "appear several times\" return c def __le__(self, val): self.optype, self.rhs = (1, val)", "2**32] here = path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" %", "model\" def objective(self, const): \"Set the objective function.\" lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret", "\"Can't construct a new LpSolve model\" self.colbuff = (c_int * maxvars)() self.rowbuff =", "cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve library on this platform.\")", "== \"linux\": lib = cdll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.so\" % ver))) else: raise ValueError(\"Can't load LpSolve", "== len(set(c.vars)), \"Some variables appear several times\" return c def __le__(self, val): self.optype,", "lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert ret == 1, \"Can't set objective", "import sys import os.path as path import platform # Import the DLL ver", "model.\" assert const.optype is not None, \"You must provide the RHS of constraint\"", "[c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = 
[c_void_p, c_double_p] lib.get_variables.restype", "update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret ==", "num def __add__(self, other): if isinstance(other, LpVariable): return LpConstraint(self.numbers + [1], self.vars +", "here = path.dirname(__file__) if sys.platform == \"win32\": lib = windll.LoadLibrary(path.abspath(path.join(here, \"dll/lpsolve55-%s.dll\" % ver)))", "c_int_p] lib.set_obj_fnex.restype = c_ubyte lib.set_add_rowmode.argtypes = [c_void_p, c_ubyte] lib.set_add_rowmode.restype = c_ubyte lib.set_maxim.argtypes =", "ret = lib.get_variables(self.lp, cast(self.rowbuff, c_double_p)) assert ret == 1, \"Can't get variable values\"", "lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The", "\"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.add_constraintex(self.lp, len(const.vars),", "assert len(c.vars) == len(set(c.vars)), \"Some variables appear several times\" return c def __le__(self,", "def fill_buffers(self, colno, row): \"Fill colno and row buffers for calling LpSolve.\" for", "import platform # Import the DLL ver = (\"x86\", \"x64\")[sys.maxsize > 2**32] here", "const.optype is not None, \"You must provide the RHS of constraint\" const.fill_buffers(self.colbuff, self.rowbuff)", "function of model\" def update_variables(self): \"Update the variable values.\" ret = lib.get_variables(self.lp, cast(self.rowbuff,", "lib.set_add_rowmode(self.lp, 0) const.fill_buffers(self.colbuff, self.rowbuff) ret = lib.set_obj_fnex(self.lp, len(const.vars), cast(self.rowbuff, c_double_p), cast(self.colbuff, c_int_p)) assert", "in enumerate(zip(self.numbers, self.vars)): colno[i] = var.index row[i] = num def __add__(self, other): if", "self.vars)): colno[i] = var.index row[i] = num def __add__(self, other): if isinstance(other, LpVariable):", "vtype): lib.set_binary(self.lp.lp, self.index, (vtype == \"int\")) def __rmul__(self, num): return LpConstraint([num], [self]) def", "self.vars + other.vars) assert len(c.vars) == len(set(c.vars)), \"Some variables appear several times\" return", "vtype=\"real\"): assert len(lp.vars) < lp.maxvars, \"Can't add a variable: \" self.index = len(lp.vars)", "= [c_void_p, c_int, c_ubyte] lib.set_int.restype = c_ubyte lib.add_constraintex.argtypes = [c_void_p, c_int, c_double_p, c_int_p,", "constraint into the model.\" assert const.optype is not None, \"You must provide the", "else: raise ValueError(\"Can't load LpSolve library on this platform.\") # Make the bindings", "(1, val) return self def __eq__(self, val): self.optype, self.rhs = (3, val) return", "c_int lib.get_variables.argtypes = [c_void_p, c_double_p] lib.get_variables.restype = c_ubyte class LpEngine(object): \"The Linear Programming", "= [c_void_p, c_int] lib.solve.argtypes = [c_void_p] lib.solve.restype = c_int lib.get_variables.argtypes = [c_void_p, c_double_p]", "a new LpSolve model\" self.colbuff = (c_int * maxvars)() self.rowbuff = (c_double *" ]
[ "'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor", "Unless required by applicable law or agreed to in writing, software # distributed", "_listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID", "repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG,", "revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update')", "repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST',", "# Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the", "Apache License, Version 2.0 (the \"License\"); you may # not use this file", "'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid()", "the License. 
You may obtain # a copy of the License at #", "status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_delete_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active", "repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock,", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self,", "# Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with(", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip)", "delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock)", "l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, 
mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted =", "MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get,", "(database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock)", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active = (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID,", "Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self,", "lb_network_ip=LB_NET_IP) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with(", "with the License. 
You may obtain # a copy of the License at", "repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID,", "update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with exception mock_repos_pool_update.reset_mock()", "id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id)", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict", "mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock()", "lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect", "listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool',", "provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock()", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. 
MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy", "constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete", "id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock)", "listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2] pools =", "id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( #", "mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect", "# Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "# Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the", "the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status':", "Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "(database_tasks. 
MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock)", "provisioning_status=constants.ERROR) # Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with(", "(database_tasks. MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock)", "= reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid,", "def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring()", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert", "# Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the", "ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data", "'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id", "revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect =", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, 
mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID,", "mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid()", "@mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock =", "'cookie_name': None} update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB()", "mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID", "SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock()", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db", "mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert", "_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, 
availability_zone={", "= Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid,", "mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1')", "the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with", "_amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock()", "mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result)", "'TEST', HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID,", "# Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the", "octavia.common import utils from octavia.controller.worker.v2.tasks import database_tasks from octavia.db import repositories as repo", "id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete,", "mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock)", "'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail')", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, 
mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F =", "mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) #", "you may # not use this file except in compliance with the License.", "= Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid,", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test", "status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB()", "'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect =", "repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. 
MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id)", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock()", "Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self,", "(database_tasks. MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock)", "mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock()", "= VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role =", "test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key", "def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks.", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test the", "HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "= database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid,", "mock_amphora_repo_delete): mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert", "revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) 
mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members')", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self,", "oslo_db import exception as odb_exceptions from oslo_utils import uuidutils from sqlalchemy.orm import exc", "repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid,", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with(", "Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch(", "def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks.", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active = (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test", "Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert", "update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2')", "= {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID)", "mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR)", "mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect", "assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail')", "the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with", "'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self,", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description':", "L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) #", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST',", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, 
mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST',", "= fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock)", "under the License. # import random from cryptography import fernet import mock from", "mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True)", "mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self, mock_member_repo_update,", "uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid()", "provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with(", "new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb", "= _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self,", "'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted =", "# import random from cryptography import fernet import mock from oslo_db import exception", "{constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(", "mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update,", "id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id)", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with(", "mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon =", "Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert", "'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'})", "mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, 
provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self,", "mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID,", "mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR),", "You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock,", "provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "= database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID,", "side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp", "= ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception", "@mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule =", "id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock)", "POOL_ID, update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR})", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE)", "uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid()", "Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock)", "the revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with", "the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, #", "id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = 
(database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "Test the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER',", "Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "_l7rule_mock.id = L7RULE_ID _listener_mock = mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id =", "update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0')", "super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid()", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True)", "id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete =", "mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID) #", "mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update,", "sqlalchemy.orm import exc from taskflow.types import failure from octavia.common import constants from octavia.common", 
"database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock()", "def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID}", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail')", "return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert =", "def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock,", "repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id,", "mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "= Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate,", "mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update,", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None)", "mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with(", "(database_tasks. MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock)", "mock_listener_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners =", "def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create =", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the", "revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test Not Found Exception mock_health_mon_repo_delete.reset_mock()", "MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test", "revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect", "id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID)", "# Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) 
repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test the", "def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj", "data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count)", "# Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "= database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self,", "id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB()", "'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with(", "MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID", "provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR)", "revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mark_amp_pending_delete_in_db = (database_tasks. 
MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the revert", "id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID)", "redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners", "repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls(", "mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert", "repo.ListenerRepository.update.call_count) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None)", "POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID", "mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active = (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE)", "result = get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session,", "from sqlalchemy.orm import exc from taskflow.types import failure from octavia.common import constants from", "exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') 
mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self,", "= mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure)", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update =", "vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID,", "mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-',", "mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. 
MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with(", "server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP,", "'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test", "AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test the revert", "@mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member =", "assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) #", "mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "once provisioning status added # repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete')", "octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID", "Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with exception mock_listener_repo_update.reset_mock()", "revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "( 
database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update')", "= [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2',", "# Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "l7policies listeners = [listener1, listener2] pools = [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID,", "mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE)", "# repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid,", "= '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE =", "self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id':", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. 
MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception", "'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) #", "'192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1'", "vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "random from cryptography import fernet import mock from oslo_db import exception as odb_exceptions", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect", "map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 
'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID,", "Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def", "self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "= database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate')", "Test the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "value='/api') # Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test", "exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db(", "loadbalancer_id=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID,", "mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails() new_amp = 
get_amp_details.execute(_amphora_mock)", "'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "= uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID =", "the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR)", "# LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self,", "revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete')", "'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", ") update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "agreed to in writing, software # distributed under the License is distributed on", "'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with", "test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, 
mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID)", "data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) #", "@mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info =", "= database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with(", "= mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor =", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "_listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock =", "amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) # Test the", "constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid,", "'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE", "revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with(", "mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect", "def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon =", "mark_l7rule_pending_create = (database_tasks. 
MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert", "mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session,", "= PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock =", "Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) #", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type':", "subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID)", "self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock,", "mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_pool_repo_update.reset_mock()", "Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "[]) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) 
repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "AMP_ID, status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) #", "from cryptography import fernet import mock from oslo_db import exception as odb_exceptions from", "Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test", "status added # repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self,", "Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID,", "@mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert", "@mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete')", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name':", "role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = 
database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls(", "vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key =", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, #", "mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID)", "= COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase):", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get',", "AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db", "return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db =", "= Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update,", "new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role)", "def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB()", "LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test", "the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with", "failure from octavia.common import constants from octavia.common import data_models from octavia.common import utils", "repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id':", "mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "@mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health =", "exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) 
mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self,", "to in writing, software # distributed under the License is distributed on an", "repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert", "the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "{'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool", "[mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1,", "# Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring(", "self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY,", "the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test", "the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock)", "from octavia.common import data_models from octavia.common import utils from 
octavia.controller.worker.v2.tasks import database_tasks from", "mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR)", "LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) #", "mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock()", "test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks.", "'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR)", "mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect", "revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update')", "mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. 
MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the", "mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with exception", "ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test", "listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update,", "= SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock =", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test", "2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with(", "mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock)", "mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls(", "test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID)", "LB_ID} 
mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test", "MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active", "mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert", "update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2') # Test the", "uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid()", "Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 
'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key)", "def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID)", "provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls(", "provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls(", "repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with(", "def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID,", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect =", "mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. 
MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock)", "database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with(", "mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP')", "'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update =", "= database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def", "self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks,", "the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert", "the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "the License. # import random from cryptography import fernet import mock from oslo_db", "mark_amp_pending_update_in_db = (database_tasks. MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the revert", "mark_member_pending_create = (database_tasks. 
MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert", "'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID", "ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail')", "Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "from oslo_db import exception as odb_exceptions from oslo_utils import uuidutils from sqlalchemy.orm import", "mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST',", "mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) #", "provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE)", "\"License\"); you may # not use 
this file except in compliance with the", "Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "repositories as repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid()", "uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid()", "mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. MarkAmphoraAllocatedInDB())", "# operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id,", "loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID)", "database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST',", "role=None, vrrp_priority=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock)", "mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the", "@mock.patch('octavia.db.repositories.AmphoraRepository.' 
'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks.", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= (database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock()", "self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB() update_members.execute(POOL_ID,", "= VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip =", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create", "= Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG,", "not use this file except in compliance with the License. 
You may obtain", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST',", "vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB()", "id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id)", "the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count)", "provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)])", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "mark_member_pending_update = (database_tasks. 
MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert", "mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid,", "lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self,", "L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create',", "from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. 
MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners)", "= database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock()", "mark_member_active = (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock()", "= self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock()", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def", "TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid,", "update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test", "mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock()", "mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG,", "provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb)", "mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data =", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) #", "@mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health", "Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test", "mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create')", "mock_get_session, 
mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id)", "mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with(", "= uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP =", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count)", "= [listener1, listener2] pools = [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools)", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock,", "Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self,", "# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the", "id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock)", "= Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid,", "the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks. MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID,", "'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID,", "= Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test", "Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.' 
'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG,", "the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID)", "LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) #", "# Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the", "Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock)", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test", "MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self,", "the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID,", "provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) 
mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer()", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) #", "exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self,", "mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get')", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self,", "= database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID", "mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) 
@mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "AMP_ID, loadbalancer_id=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST',", "the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock()", "mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock()", "mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "the License is distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count)", "vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) # Test revert with", "operating_status=constants.ONLINE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test", "provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "# Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the", "_amphora_mock.lb_network_ip = LB_NET_IP _amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id", "mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", 
"revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_delete_in_db(self, mock_l7rule_repo_update, mock_generate_uuid,", "update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test the", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update')", "database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete')", "update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2')", "mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with exception", "no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the revert with", "lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP)", "vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, 
mock_loadbalancer_repo_update,", "Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def", "Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the", "COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def", "mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) # Test revert with exception", "provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test", "mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID,", "exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db(", "the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID,", "'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock)", "uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid()", "= LB_ID self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id =", 
"mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test the", "id=LISTENER_ID) # Test the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self,", "_amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role", "revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def", "revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with", "exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate',", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST',", "'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)])", "mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. 
MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the", "redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST',", "mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP,", "= database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID: 'member1'},", "the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock()", "Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "= COMPUTE_ID _amphora_mock.lb_network_ip = LB_NET_IP _amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id =", "VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock]", "mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock()", "exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = 
Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self,", "AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info =", "the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "= uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP =", "repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "@mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id", "repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule", "'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, 
mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db", "delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID)", "def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type':", "mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with(", "the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with", "test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active", "repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "@mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update, mock_loadbalancer_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip", "@mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete =", "mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with(", "[data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies)", "mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) 
repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test',", "Copyright 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License,", "'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "[redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2] pools", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "[mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG,", "Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert", "'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None,", "mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect", "LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, [])", "mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid,", "Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. 
MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE)", "the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect =", "LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12'", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST',", "repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb)", "mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict])", "return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb =", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test", "import mock from oslo_db import exception as odb_exceptions from oslo_utils import uuidutils from", "= '192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID =", "LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE)", "{'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') #", "self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\",", "# Test the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST',", "status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "@mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy", "the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "_loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock() _l7policy_mock.id", "id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete):", "@mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members", "@mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create =", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon", "mock_member_repo_update, 
mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight':", "l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id,", "mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST',", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name': 'test',", "Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1,", "mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the", "@mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create =", "express or implied. See the # License for the specific language governing permissions", "= database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self,", "mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock)", "[{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy =", "'')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock =", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID", "'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update, mock_loadbalancer_get, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def", "cryptography import fernet import mock from oslo_db import exception as odb_exceptions from oslo_utils", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete =", "mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update,", "revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self,", "# Test the 
revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the", "provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test", "listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST',", "mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock()", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock)", "update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. 
MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) #", "amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get,", "= data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool =", "repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "'192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255)", "= mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock =", "mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None,", "'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID,", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID)", "provisioning_status=constants.PENDING_CREATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False)", "# Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id)", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. 
MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test", "= get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1", "mark_pool_pending_update = (database_tasks. MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert", "with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def", "mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock()", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. 
MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB()", "test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID:", "def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB())", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB())", "_amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id,", "IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update,", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock)", "ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, 
mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with(", "'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_delete_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "= get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo()", "= (database_tasks. 
MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock()", "get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) #", "provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test", "'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock)", "database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST',", "revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id,", "id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with", "update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG,", "'TEST', id=LISTENER_ID, 
provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.' 'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock)", "mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid,", "= uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None]", "update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE)", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock()", "Test with LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID}", "def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer()", "id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count)", "mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert with exception", "test_update_l7rule_in_db(self, mock_l7rule_repo_update, 
mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB()", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update,", "mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert", "'TEST', LISTENER_ID, name='test', description='test2') # Test the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "= [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb)", "either express or implied. 
See the # License for the specific language governing", "get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer() result = get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self,", "mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID,", "Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert", "mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "@mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener =", "uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip = LB_NET_IP", "repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG,", "{constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with(", "update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the", "= (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert mock_health_mon_repo_update.reset_mock()", "lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock,", "create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def", "# POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj =", "VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-',", "test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. MarkMemberPendingDeleteInDB())", "= (database_tasks. MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock()", "new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG,", "= Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID)", "_compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete')", "language governing permissions and limitations # under the License. 
# import random from", "mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR),", "= Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update,", "Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE,", "with LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active", "uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid()", "'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test Not Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock)", "Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert", "mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock)", "repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock)", "mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', 
id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update,", "Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.'", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) #", "mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock()", "delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self,", "def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) #", "AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) #", "provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test", "on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND,", "update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert')", "l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1,", "# Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR)", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, 
provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect =", "Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID,", "database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID,", "'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "_compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG')", "mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1,", "None} update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID,", "repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "database_tasks.GetListenersFromLoadbalancer() result = get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG,", 
"mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test", "exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock)", "'10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with(", "delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # TODO(johnsom) fix once provisioning status added", "mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock)", "test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE)", "(database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self,", "the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "= Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST',", "OR CONDITIONS OF ANY KIND, either express or implied. 
See the # License", "database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def", "obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail')", "'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "= map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) #", "mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID,", "Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "# Licensed under the Apache License, Version 2.0 (the \"License\"); you may #", "mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def", "provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls(", "# POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "= 
Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id)", "with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def", "= CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session',", "mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect", "test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID)", "l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "@mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def 
test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict", "'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE)", "with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.'", "= LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id =", "mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update,", "[_listener_mock] get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer() result = get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id) self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result) def", "disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid,", "test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID)", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID,", "the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # 
Test the revert with", "self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock", "mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id =", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None,", "data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID:", "for the specific language governing permissions and limitations # under the License. #", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id", "database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with(", "[mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST',", "mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock)", "mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, 
provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls(", "compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID,", "'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 =", "'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock)", "may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "mock_listener_repo_update.assert_not_called() # Test the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect", "@mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy =", "= l7policies listeners = [listener1, listener2] pools = [default_pool, redirect_pool, unused_pool] lb =", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID,", "mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid,", "Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG,", "Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete,", "(database_tasks.MarkPoolPendingDeleteInDB()) mark_pool_pending_delete.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock() 
mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details = database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP,", "vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails()", "revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with", "with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def", "def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete = (database_tasks.MarkPoolPendingDeleteInDB())", "# Test the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST',", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with(", "Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "= Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None,", "vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = 
failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock)", "provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks.", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid,", "'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2)", "the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR)", "data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies", "Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session,", "id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock)", "mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect", "def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. 
MarkAmphoraPendingDeleteInDB())", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test',", "self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update, mock_loadbalancer_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID,", "_amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with exception mock_amphora_repo_update.reset_mock()", "# Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with(", "BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "# Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id =", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor = 
self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST',", "def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae)", "= Exception('fail') mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG,", "HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID", "mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id,", "_compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor", "= Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG,", "test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData() update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with(", "uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID", "update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id(", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) #", "revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) 
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no", "database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "the specific language governing permissions and limitations # under the License. # import", "= (database_tasks. MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7policy_repo_update.reset_mock()", "self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock()", "database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id)", "repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted", "the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with", "Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect", "_compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update')", "# Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # TODO(johnsom) fix", "revert mock_health_mon_repo_update.reset_mock() 
mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj,", "repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect", "POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id =", "# 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) #", "MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the", "@mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 =", "[_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock()", "Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID,", "mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) 
mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock()", "= (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID)", "with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with(", "exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def", "{'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the revert", "mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1,", "mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock()", "health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with(", "uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE'", "mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict)", "mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) 
repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID)", "'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete,", "= database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) #", "update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. 
MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock =", "\"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express", "LISTENER_ID}], result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip =", "Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID =", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count)", "= random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock =", "LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID", "'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool)", "id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor = self.health_mon_mock self.member_mock = { constants.MEMBER_ID:", "mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer() result = get_list_from_lb_obj.execute(_loadbalancer_mock)", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. 
MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock)", "MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool", "LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID", "mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect", "import fernet import mock from oslo_db import exception as odb_exceptions from oslo_utils import", "MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock()", "AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb =", "revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert", "availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id)", "redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock)", "revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update')", "id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') 
mark_l7policy_pending_create.revert(self.l7policy_mock)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the", "distributed on an \"AS IS\" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer)", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid,", "mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 =", "mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test", "= uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid() PORT_ID = uuidutils.generate_uuid() SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID =", "mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks. 
MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the", "_listener_mock = mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict =", "= Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid,", "Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert", "database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE,", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) #", "(database_tasks. 
MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock)", "members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2", "_vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self, mock_vrrp_group_create,", "delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix", "# Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST',", "repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "# Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE:", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with(", "provisioning_status=constants.ERROR)]) def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB()", "compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) #", "'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid()", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) 
repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST',", "id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict])", "mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb)", "default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count)", "with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock()", "= uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae =", "HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer =", "the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "applicable law or agreed to in writing, software # distributed under the License", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. 
[Overlapping n-gram fragments from OpenStack Octavia unit tests for the v2 controller-worker database tasks (module under test: octavia.controller.worker.v2.tasks.database_tasks, asserted against octavia.db.repositories). The fragments cover the execute/revert behaviour of tasks that mark load balancers, listeners, pools, members, health monitors, L7 policies/rules and amphorae ACTIVE, PENDING_CREATE/UPDATE/DELETE or DELETED in the database, with each revert path setting provisioning_status=ERROR, plus VIP allocation updates, amphora failover/VRRP group handling, and amphora health-monitoring enable/disable.]
You may", "# repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid,", "mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7],", "mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock()", "= database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock)", "assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 
'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG,", "mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock()", "'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id", "l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 =", "mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect", "new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority)", "= database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) #", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group =", "mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name':", "id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule =", "'vip': {'qos_policy_id': 'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID, qos_policy_id='fool') def test_update_listener_in_db(self,", "revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') 
mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def", "mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict =", "revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "Version 2.0 (the \"License\"); you may # not use this file except in", "octavia.common import data_models from octavia.common import utils from octavia.controller.worker.v2.tasks import database_tasks from octavia.db", "mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks. MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock)", "mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock()", "status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with(", "with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) #", "with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) #", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert", "<gh_stars>0 # Copyright 2015 Hewlett-Packard Development Company, L.P. # # Licensed under the", "= (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock()", "# Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the", "mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self,", "L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id = L7RULE_ID _listener_mock = mock.MagicMock() _listener_to_dict_mock = mock.MagicMock(", "id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock)", "pools = [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True)", "'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2') # Test the revert mock_listener_repo_update.reset_mock()", "self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID,", "# Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock)", "members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')]", "import failure from octavia.common import constants from octavia.common import data_models from octavia.common import", "{ constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock", "= {'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict}", "revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update')", "provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( 
[mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)])", "id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon", "revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self,", "_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock()", "mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID,", "repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)])", "mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get,", "= [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_listener_repo_update, 
mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) #", "id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amp_failover_details(self, mock_generate_uuid, mock_LOG, mock_get_session,", "Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock)", "{'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2') # Test the revert", "return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db", "'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with(", "@mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active =", "Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert", "L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP", "update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect", "test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer", "the 
revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff)", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID,", "with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def", "def test_mark_l7rule_pending_delete_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks.", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. 
MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID,", "def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy =", "mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid,", "revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update,", "_amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock", "id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data = database_tasks.UpdateAmphoraeVIPData()", "mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. MarkLBAmphoraeDeletedInDB())", "# Test the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID)", "permissions and limitations # under the License. 
# import random from cryptography import", "repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST',", "test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id", "revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "# Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) #", "database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test", "revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def", "_amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id", "repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID =", "= database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock)", "self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id", "the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = 
(database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with exception", "the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "# Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the", "def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB()", "the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with", "OF ANY KIND, either express or implied. See the # License for the", "test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb", "# 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0])", "revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', #", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "limitations # under the License. 
# import random from cryptography import fernet import", "_listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id", "AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active =", "= Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid,", "repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def", "delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get',", "id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock()", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None,", "update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, 
mock_LOG,", "MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) #", "self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB()", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST',", "COMPUTE_ID = uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with(", "mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR)", "mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID:", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "= random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR =", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, 
mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool') members1 = [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID:", "L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST',", "mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST',", "{constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool,", "id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail')", "update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def", "LB_ID, provisioning_status=constants.ACTIVE) # Test with LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID:", "database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update')", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None,", "'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, 
update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) #", "unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks. MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id,", "mark_l7rule_pending_delete = (database_tasks. MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert", "'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2') # Test the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with(", "map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id", "amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None)", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock)", "(database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock)", "Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST',", "listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST',", "def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id =", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks.", "_pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update,", "mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool", "[]) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail')", "mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) 
mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE,", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID,", "mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID,", "provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with(", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST',", "mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID)", "= database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the revert", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test", "mock_amphora_repo_delete): mark_amp_allocated_in_db = (database_tasks. 
MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP,", "self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect =", "# Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the", "role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock()", "'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)", "data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb =", "MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10'", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST',", "octavia.db import repositories as repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. 
MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test", "= database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) #", "mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID,", "(database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock)", "mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active = (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with(", "mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule", "mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG,", "id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock)", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active = (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock)", "MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", 
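The fragments around this point keep repeating one convention from these Octavia database-task unit tests: a repository update method is patched out, the task's execute() is asserted to set a PENDING_* provisioning status, and its revert() is asserted to set ERROR. Below is a minimal, self-contained sketch of that pattern under stated assumptions: FakeRepository, MarkPendingDeleteTask and the status constants are hypothetical stand-ins, not Octavia code (the real tests patch, for example, octavia.db.repositories.HealthMonitorRepository.update and exercise database_tasks.MarkHealthMonitorPendingDeleteInDB).

from unittest import mock
import unittest

PENDING_DELETE = 'PENDING_DELETE'   # stand-in for constants.PENDING_DELETE
ERROR = 'ERROR'                     # stand-in for constants.ERROR
HM_ID = 'hm-1234'                   # stand-in for a generated UUID


class FakeRepository:
    """Hypothetical repository; update() is patched away in the test."""
    @staticmethod
    def update(session, *args, **kwargs):
        raise NotImplementedError


class MarkPendingDeleteTask:
    """execute() marks the object PENDING_DELETE; revert() marks it ERROR."""
    def execute(self, obj_id):
        FakeRepository.update('TEST', obj_id,
                              provisioning_status=PENDING_DELETE)

    def revert(self, obj_id):
        FakeRepository.update('TEST', id=obj_id,
                              provisioning_status=ERROR)


class TestMarkPendingDelete(unittest.TestCase):
    @mock.patch.object(FakeRepository, 'update')
    def test_execute_and_revert(self, mock_update):
        task = MarkPendingDeleteTask()

        # execute() should request the PENDING_DELETE status exactly once.
        task.execute(HM_ID)
        mock_update.assert_called_once_with(
            'TEST', HM_ID, provisioning_status=PENDING_DELETE)

        # Reset the mock, then verify the revert path flips the status.
        mock_update.reset_mock()
        task.revert(HM_ID)
        mock_update.assert_called_once_with(
            'TEST', id=HM_ID, provisioning_status=ERROR)


if __name__ == '__main__':
    unittest.main()

The split between a positional id in the execute assertion and an id keyword in the revert assertion mirrors the calls visible in the surrounding fragments.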
"mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "listeners = [listener1, listener2] pools = [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners,", "= uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP =", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH,", "role='BACKUP', vrrp_priority=constants.ROLE_BACKUP_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb =", "mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test", "mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description':", "Hewlett-Packard Development Company, L.P. 
# # Licensed under the Apache License, Version 2.0", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "# Test the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete,", "[mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock()", "= HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority =", "IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID)", "= map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id", "disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, 
vrrp_ip=VRRP_IP, ha_ip=HA_IP,", "VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock,", "exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid,", "L7RULE_ID _listener_mock = mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict", "import data_models from octavia.common import utils from octavia.controller.worker.v2.tasks import database_tasks from octavia.db import", "revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete')", "# LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with LB_ID", "{constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1',", "the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test Not Found Exception", "mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None,", "def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB()", "revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) 
mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock)", "repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)])", "[mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1,", "amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "# Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the", "health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id,", "'192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID = uuidutils.generate_uuid() HM_ID = uuidutils.generate_uuid() MEMBER_ID = uuidutils.generate_uuid()", "MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with(", "the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with", 
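Several fragments here also exercise the "revert with exception" branch: the patched repository update is given side_effect = Exception('fail') before revert() is called again, and the test still asserts that the ERROR update was attempted; that assertion can only pass if revert() swallows (logs) the repository failure instead of re-raising it. A self-contained sketch of that check follows, with hypothetical names standing in for the Octavia task and repository.

from unittest import mock
import unittest

ERROR = 'ERROR'                 # stand-in for constants.ERROR
MEMBER_ID = 'member-5678'       # stand-in for a generated UUID


class FakeRepository:
    """Hypothetical repository; update() is patched away in the test."""
    @staticmethod
    def update(session, *args, **kwargs):
        raise NotImplementedError


class MarkPendingCreateTask:
    def revert(self, obj_id):
        # A failed cleanup must not stop the flow from unwinding, so the
        # repository error is swallowed (the real tasks log a warning here).
        try:
            FakeRepository.update('TEST', id=obj_id,
                                  provisioning_status=ERROR)
        except Exception:
            pass


class TestRevertWithException(unittest.TestCase):
    @mock.patch.object(FakeRepository, 'update')
    def test_revert_tolerates_repo_failure(self, mock_update):
        mock_update.side_effect = Exception('fail')

        # revert() must not raise even though the repository update fails...
        MarkPendingCreateTask().revert(MEMBER_ID)

        # ...and the attempted call is still recorded on the mock, because
        # unittest.mock records call arguments before applying side_effect.
        mock_update.assert_called_once_with(
            'TEST', id=MEMBER_ID, provisioning_status=ERROR)


if __name__ == '__main__':
    unittest.main()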
"mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock()", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST',", "self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id", "mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock)", "= mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id =", "l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2] pools = [default_pool, redirect_pool, unused_pool]", "mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect", "def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock)", "provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp):", "CONDITIONS OF ANY KIND, either express or implied. See the # License for", "test_mark_l7policy_pending_delete_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. 
MarkL7PolicyPendingDeleteInDB())", "= POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id =", "id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test", "revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with(", "self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock()", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks. 
MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse()", "Test Not Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST',", "mark_amp_allocated_in_db = (database_tasks. MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID)", "id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.' 
'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR,", "id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock)", "mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID from", "= database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get, mock_generate_uuid, mock_LOG,", "status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db =", "'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "{'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST',", "listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST',", "def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy()", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() 
mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict)", "id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock)", "= AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock =", "test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_pending_delete = (database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock)", "AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. MarkMemberPendingCreateInDB())", "= database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) #", "delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP,", "MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "= [redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2]", "= (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7rule_repo_update.reset_mock()", "status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) 
repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test", "Test the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the", "taskflow.types import failure from octavia.common import constants from octavia.common import data_models from octavia.common", "mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the", "provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB()", "# Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the", "compliance with the License. You may obtain # a copy of the License", "random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid()", "delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock)", "revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "test_update_pool_in_db(self, mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP',", "mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with(", "constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST',", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID,", "update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the", "'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): get_amp_details", "= (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock)", "mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None,", "repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) # Test the revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called)", "[listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) 
repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock()", "no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(LB_ID,", "as repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update", "mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect =", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. 
MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) #", "mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock()", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def", "status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP", "mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with(", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_pool = database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test", "may # not use this file except in compliance with the License. You", "import database_tasks from octavia.db import repositories as repo import octavia.tests.unit.base as base AMP_ID", "provisioning_status=constants.ERROR) # Test Not Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST',", "(database_tasks. 
MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock)", "'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock()", "'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool =", "update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail')", "# Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the", "update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID)", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks. MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test", "Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert", "(database_tasks. MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock)", "import uuidutils from sqlalchemy.orm import exc from taskflow.types import failure from octavia.common import", "{constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test', description='test2') #", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks. 
MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE)", "database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock()", "@mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy =", "mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with(", "database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID)", "# Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with", "mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock()", "= (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7policy_repo_update.reset_mock()", "mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update,", "reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update, mock_loadbalancer_get,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. 
MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) #", "'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail')", "mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action':", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the", "= Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock()", "= database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert", "listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count)", "def test_delete_l7rule_in_db(self, mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB()", "with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self,", "revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with", "provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test", "the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def 
test_get_amphora_details(self,", "create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id)", "mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the", "with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def", "id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session,", "'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "= Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay':", "mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock()", "'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "(database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "[_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = 
get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self,", "'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "_cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id", "database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id,", "= '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY =", "@mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active =", "test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry]", "# Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the", "Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert", "listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update,", "map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def", "= Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 
'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,", "HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID,", "AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail')", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect =", "test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock,", "load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid,", "self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0,", "self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED)", "mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect =", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE)", "the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, 
_amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test", "id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB()", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None])", "repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id,", "test_update_listener_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict =", "mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id =", "LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock() _l7policy_mock.id = L7POLICY_ID _l7rule_mock = mock.MagicMock()", "mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock)", "delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock)", "_l7policy_mock = mock.MagicMock() _l7policy_mock.id = L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id = L7RULE_ID _listener_mock", "= database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) #", "update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def", "def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, 
mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db =", "Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test", "L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST',", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_delete = (database_tasks. MarkL7PolicyPendingDeleteInDB()) mark_l7policy_pending_delete.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE)", "vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session,", "import random from cryptography import fernet import mock from oslo_db import exception as", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def", "[{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1", "amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id)", "status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG,", "update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect", "test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_active = 
(database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock)", "mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete')", "Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self,", "@mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create =", "busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "= mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def", "[mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock)", "= utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID)", "'TEST', LB_ID, name='test', description='test2') # Test the revert mock_loadbalancer_repo_update.reset_mock() 
update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_health_mon = database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with(", "database_tasks.UpdateHealthMonInDB() update_health_mon.execute(self.health_mon_mock, {'delay': 1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test", "result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test", "revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test", "id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect =", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock)", "Test the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update,", "mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect", "def setUp(self): self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock =", "the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR)", "Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) 
mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert", "@mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member =", "mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid,", "# Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the", "= VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae =", "[mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST',", "the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with", "database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id,", "id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert", "id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip =", "'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail')", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update", "'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, 
mock_amphora_repo_update,", "Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock()", "with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def", "= (database_tasks. MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock()", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def", "(database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock)", "data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2] pools = [default_pool, redirect_pool,", "# Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock = mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no", "mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "= (database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def", "AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "@mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id", "'fool'}}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID, qos_policy_id='fool') def test_update_listener_in_db(self, mock_generate_uuid, mock_LOG,", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test", "id=L7POLICY_ID) # Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( #", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE)", "def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock)", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test", "= (database_tasks. 
MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock()", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock()", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE)", "mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp)", "(database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get,", "update_amp_vip_data.execute(_amphorae) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) # Test the revert create_amp_in_db.revert(_tf_failure_mock)", "# Test the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the", "uuidutils.generate_uuid() L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11'", "AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) # Test", "delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) 
repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) #", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration()", "= (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test", "mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid,", "VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR", "mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id,", "= (database_tasks. 
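The test-method signatures quoted in these fragments all end with the same long tail of mock_* parameters (mock_amphora_repo_delete, mock_amphora_repo_update, mock_listener_repo_update, mock_loadbalancer_repo_update, mock_get_session, mock_LOG, mock_generate_uuid); those are injected by stacked @mock.patch decorators, which hand their mocks to the test bottom-up. Below is a minimal, self-contained sketch of that ordering rule only; the `repositories` container and its update methods are local stand-ins, not the real octavia.db.repositories module.

# Minimal illustration of stacked mock.patch decorators: decorators apply
# bottom-up, so the decorator closest to the function supplies the first
# extra argument. The patched targets here are local stand-ins.
import unittest
from unittest import mock


class repositories(object):  # stand-in for a repositories module
    class AmphoraRepository(object):
        @staticmethod
        def update(session, amp_id, **fields):
            raise NotImplementedError

    class ListenerRepository(object):
        @staticmethod
        def update(session, listener_id, **fields):
            raise NotImplementedError


class TestPatchOrdering(unittest.TestCase):

    @mock.patch.object(repositories.ListenerRepository, 'update')
    @mock.patch.object(repositories.AmphoraRepository, 'update')
    def test_order(self, mock_amphora_update, mock_listener_update):
        # The innermost decorator (AmphoraRepository.update) is injected first.
        repositories.AmphoraRepository.update('TEST', 'amp-1', status='ERROR')
        mock_amphora_update.assert_called_once_with(
            'TEST', 'amp-1', status='ERROR')
        mock_listener_update.assert_not_called()


if __name__ == '__main__':
    unittest.main()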
MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7rule_repo_update.reset_mock()", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID)", "self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock)", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "mock_l7rule_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with(", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "use this file except in compliance with the License. You may obtain #", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls(", "1, 'timeout': 2}) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock()", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "# Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "{'loadbalancer_id': LB_ID} mark_loadbalancer_active = 
database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) #", "Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock()", "the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self,", "exception as odb_exceptions from oslo_utils import uuidutils from sqlalchemy.orm import exc from taskflow.types", "self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock", "Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert", "get_amp_details = database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID,", "MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "# Test the revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the", "mock_amphora_repo_delete): mark_l7policy_active = (database_tasks.MarkL7PolicyActiveInDB()) mark_l7policy_active.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the", "and limitations # under the License. 
# import random from cryptography import fernet", "create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB() update_members.execute(POOL_ID, constants.ONLINE) mock_member_repo_update_pool_members.assert_called_once_with( 'TEST',", "= (database_tasks. MarkLBPendingDeleteInDB()) mark_loadbalancer_pending_delete.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock()", "mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock()", "default_pool = data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'},", "with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB() update_members.execute(POOL_ID, constants.ONLINE) mock_member_repo_update_pool_members.assert_called_once_with(", "'TEST', id=L7RULE_ID) # Test the revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with(", "def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_allocated_in_db =", "mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. 
MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_UPDATE) # Test the", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_listener_get.return_value = _listener_mock _loadbalancer_mock.listeners = [_listener_mock]", "# Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "KIND, either express or implied. See the # License for the specific language", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "# Test the revert mock_l7policy_repo_delete.reset_mock() delete_l7policy.revert(_l7policy_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST',", "create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect =", "mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute(", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active", "revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update')", "'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False) self.assertEqual(_amphora_mock.id, amp_id) # Test the revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock()", "import exception as odb_exceptions from oslo_utils import uuidutils from sqlalchemy.orm import exc from", "file except in compliance with the License. 
You may obtain # a copy", "'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with(", "(database_tasks. MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock)", "id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description':", "# Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the", "Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID,", "_loadbalancer_mock.vip = _vip_mock get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def", "mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST',", "mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID self.l7rule_mock = mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock", "SUBNET_ID = uuidutils.generate_uuid() VRRP_PORT_ID = uuidutils.generate_uuid() HA_PORT_ID = uuidutils.generate_uuid() L7POLICY_ID = uuidutils.generate_uuid() L7RULE_ID", "LB_NET_IP _amphora_mock.vrrp_ip = VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get')", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks. 
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from unittest import mock

from oslo_utils import uuidutils
from taskflow.types import failure

from octavia.common import constants
from octavia.common import data_models
from octavia.common import utils
from octavia.controller.worker.v2.tasks import database_tasks
from octavia.db import repositories as repo
import octavia.tests.unit.base as base
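The module imports TaskFlow's Failure type because several revert paths further below are exercised twice: once with a Failure (execute itself blew up, so nothing should be cleaned up) and once with a real result (the created row must be removed); the CreateAmphoraInDB test is the clearest example. A standalone sketch of that guard, assuming a hypothetical amphora_repo stand-in; the isinstance check mirrors the behaviour the tests expect rather than Octavia's exact code:

from unittest import mock

from taskflow.types import failure


def revert_sketch(result, amphora_repo, session='TEST'):
    """Illustrative sketch: clean up only if execute() produced an id."""
    if isinstance(result, failure.Failure):
        # execute() itself failed, so there is no row to delete.
        return
    amphora_repo.delete(session, id=result)


amphora_repo = mock.MagicMock()

# Revert after a failed execute: no repository call is made.
revert_sketch(failure.Failure.from_exception(Exception('boom')), amphora_repo)
amphora_repo.delete.assert_not_called()

# Revert after a successful execute: the created row is removed.
revert_sketch('AMP', amphora_repo)
amphora_repo.delete.assert_called_once_with('TEST', id='AMP')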
AMP_ID = uuidutils.generate_uuid()
COMPUTE_ID = uuidutils.generate_uuid()
LB_ID = uuidutils.generate_uuid()
SERVER_GROUP_ID = uuidutils.generate_uuid()
LISTENER_ID = uuidutils.generate_uuid()
POOL_ID = uuidutils.generate_uuid()
HM_ID = uuidutils.generate_uuid()
MEMBER_ID = uuidutils.generate_uuid()
PORT_ID = uuidutils.generate_uuid()
SUBNET_ID = uuidutils.generate_uuid()
VRRP_PORT_ID = uuidutils.generate_uuid()
HA_PORT_ID = uuidutils.generate_uuid()
L7POLICY_ID = uuidutils.generate_uuid()
L7RULE_ID = uuidutils.generate_uuid()
VIP_IP = '192.168.127.12'
VRRP_IP = '172.16.31.10'
HA_IP = '192.168.3.11'

# Shared module-level fixtures: one amphora wired to one load balancer,
# the L7 objects, a listener that serialises to a dict, and a TaskFlow
# Failure stand-in used by the revert tests.
_amphora_mock = mock.MagicMock()
_amphora_mock.id = AMP_ID
_amphora_mock.compute_id = COMPUTE_ID
_amphora_mock.vrrp_ip = VRRP_IP
_amphora_mock.ha_ip = HA_IP
_amphorae = [_amphora_mock]
_loadbalancer_mock = mock.MagicMock()
_loadbalancer_mock.id = LB_ID
_loadbalancer_mock.amphorae = [_amphora_mock]
_l7policy_mock = mock.MagicMock()
_l7policy_mock.id = L7POLICY_ID
_l7rule_mock = mock.MagicMock()
_l7rule_mock.id = L7RULE_ID
_listener_mock = mock.MagicMock()
_listener_to_dict_mock = mock.MagicMock(
    return_value={'id': LISTENER_ID})
_listener_mock.id = LISTENER_ID
_listener_mock.to_dict = _listener_to_dict_mock
_tf_failure_mock = mock.Mock(spec=failure.Failure)
_vip_mock = mock.MagicMock()
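The test class that follows stacks several class-level mock.patch decorators, and the resulting mocks are handed to every test method bottom-up, which is why each signature ends with the same run of arguments from mock_generate_uuid down to mock_amphora_repo_delete. A self-contained reminder of that ordering; the patch targets here are arbitrary standard-library names used only to demonstrate the rule:

import os
import unittest
from unittest import mock


class PatchOrderExample(unittest.TestCase):

    # The decorator nearest the method provides the first mock argument.
    @mock.patch('os.remove')        # arrives second
    @mock.patch('os.path.exists')   # arrives first
    def test_bottom_up_order(self, mock_exists, mock_remove):
        mock_exists.return_value = True
        self.assertTrue(os.path.exists('/no/such/file'))
        os.remove('/no/such/file')
        mock_remove.assert_called_once_with('/no/such/file')


if __name__ == '__main__':
    unittest.main()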
@mock.patch('octavia.db.repositories.AmphoraRepository.delete')
@mock.patch('octavia.db.repositories.AmphoraRepository.update')
@mock.patch('octavia.db.repositories.ListenerRepository.update')
@mock.patch('octavia.db.repositories.LoadBalancerRepository.update')
@mock.patch('octavia.db.api.get_session', return_value='TEST')
@mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG')
@mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID)
class TestDatabaseTasks(base.TestCase):

    def setUp(self):
        self.health_mon_mock = mock.MagicMock()
        self.health_mon_mock.id = HM_ID
        self.health_mon_mock.pool_id = POOL_ID

        self.listener_mock = mock.MagicMock()
        self.listener_mock.id = LISTENER_ID

        self.loadbalancer_mock = mock.MagicMock()
        self.loadbalancer_mock.id = LB_ID

        self.l7policy_mock = mock.MagicMock()
        self.l7policy_mock.id = L7POLICY_ID
        self.l7rule_mock = mock.MagicMock()
        self.l7rule_mock.id = L7RULE_ID
        self.l7rule_mock.l7policy = self.l7policy_mock

        super(TestDatabaseTasks, self).setUp()

    @mock.patch('octavia.db.repositories.AmphoraRepository.create',
                return_value=_amphora_mock)
    def test_create_amphora_in_db(self, mock_create, mock_generate_uuid,
                                  mock_LOG, mock_get_session,
                                  mock_loadbalancer_repo_update,
                                  mock_listener_repo_update,
                                  mock_amphora_repo_update,
                                  mock_amphora_repo_delete):
        create_amp_in_db = database_tasks.CreateAmphoraInDB()
        amp_id = create_amp_in_db.execute()

        repo.AmphoraRepository.create.assert_called_once_with(
            'TEST', id=AMP_ID, status=constants.PENDING_CREATE,
            cert_busy=False)
        self.assertEqual(_amphora_mock.id, amp_id)

        # Test the revert: a Failure result means nothing was created.
        create_amp_in_db.revert(_tf_failure_mock)
        self.assertFalse(mock_amphora_repo_delete.called)

        # A real result means the created row must be removed.
        mock_amphora_repo_delete.reset_mock()
        create_amp_in_db.revert(result='AMP')
        self.assertTrue(mock_amphora_repo_delete.called)
        mock_amphora_repo_delete.assert_called_once_with('TEST', id='AMP')

    def test_mark_LB_deleted_in_db(self, mock_generate_uuid, mock_LOG,
                                   mock_get_session,
                                   mock_loadbalancer_repo_update,
                                   mock_listener_repo_update,
                                   mock_amphora_repo_update,
                                   mock_amphora_repo_delete):
        mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB()
        mark_loadbalancer_deleted.execute(self.loadbalancer_mock)
        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', LB_ID, provisioning_status=constants.DELETED)

        # Test the revert
        mock_loadbalancer_repo_update.reset_mock()
        mark_loadbalancer_deleted.revert(self.loadbalancer_mock)
        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', id=LB_ID, provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_loadbalancer_repo_update.reset_mock()
        mock_loadbalancer_repo_update.side_effect = Exception('fail')
        mark_loadbalancer_deleted.revert(self.loadbalancer_mock)
        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', id=LB_ID, provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.AmphoraRepository.'
                'allocate_and_associate',
                side_effect=[_amphora_mock, None])
    def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate,
                                         mock_generate_uuid, mock_LOG,
                                         mock_get_session,
                                         mock_loadbalancer_repo_update,
                                         mock_listener_repo_update,
                                         mock_amphora_repo_update,
                                         mock_amphora_repo_delete):
        map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora()
        amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id)

        repo.AmphoraRepository.allocate_and_associate.\
            assert_called_once_with('TEST', LB_ID, None)
        self.assertEqual(_amphora_mock.id, amp_id)

        # The second call exhausts the side_effect list: no amphora is free.
        amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id)
        self.assertIsNone(amp_id)

        # Test revert
        map_lb_to_amp.revert(None, self.loadbalancer_mock.id)
        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', id=LB_ID, provisioning_status=constants.ERROR)

        # Test revert with exception
        repo.LoadBalancerRepository.update.reset_mock()
        mock_loadbalancer_repo_update.side_effect = Exception('fail')
        map_lb_to_amp.revert(None, self.loadbalancer_mock.id)
        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST', id=LB_ID, provisioning_status=constants.ERROR)

    # The remaining tests in this module repeat the same execute/revert
    # pattern for listeners, pools, members, health monitors, L7 policies
    # and rules, amphora role/VRRP bookkeeping, VIP allocation and
    # certificate-expiration bookkeeping, each asserting the matching
    # repository update (or delete) on execute and an ERROR provisioning
    # status on revert.
MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_member_repo_update.reset_mock()", "mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete,", "id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db =", "busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB()", "self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert mock_amphora_repo_update.reset_mock()", "revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG,", "mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock)", "timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test", "except in compliance with the License. 
You may obtain # a copy of", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update,", "# Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test", "= database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute( self.loadbalancer_mock.id, availability_zone={ constants.COMPUTE_ZONE: 'fakeaz'}) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, 'fakeaz')", "'SOURCE_IP', 'cookie_name': None} update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool =", "Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert", "repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock)", "provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update,", "mock_vrrp_group_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active =", "mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock()", "= (database_tasks. 
MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock()", "mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock) mock_vrrp_group_create.assert_called_once_with( 'TEST', load_balancer_id=LB_ID, vrrp_group_name=LB_ID.replace('-', ''),", "= Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG,", "= self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name':", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test the revert", "id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_reload_load_balancer(self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "id=HM_ID, provisioning_status=constants.ERROR) # Test Not Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with(", "@mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid,", "MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor = self.health_mon_mock self.member_mock = {", "( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "# Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with 
exception", "mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock()", "# Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get')", "# Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0,", "self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST',", "test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks.", "id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock)", "redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy] listener2 = data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies", "vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1", "revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect", "'172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100)", 
"LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active =", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def", "mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update,", "exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db(", "the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) #", "delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_member_repo_update.reset_mock() mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock)", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect =", "= _listener_mock _loadbalancer_mock.listeners = [_listener_mock] get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer() result = 
get_list_from_lb_obj.execute(_loadbalancer_mock) mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id)", "mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert", "= mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock =", "'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None,", "mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get',", "# Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member", "reload_amp = database_tasks.ReloadAmphora() amp = reload_amp.execute(AMP_ID) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) self.assertEqual(_amphora_mock, amp) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock)", "_amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self,", "the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test", "def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks.", "revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) 
@mock.patch('octavia.db.repositories.MemberRepository.update')", "id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID)", "mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID})", "mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with(", "VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE = 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY", "utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the", "_compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self,", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid,", "= get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID,", "[mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST',", "mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST',", "AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) 
repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None)", "database_tasks from octavia.db import repositories as repo import octavia.tests.unit.base as base AMP_ID =", "HM_ID, delay=1, timeout=2) # Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR)", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [])", "Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert", "mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect =", "test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks.", "self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG,", "Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session,", "id=HM_ID, provisioning_status=constants.ERROR) # TODO(johnsom) fix once provisioning status added # repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST',", "Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called() # Test the revert mock_loadbalancer_repo_update.reset_mock()", "@mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update =", "mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, 
provisioning_status=constants.ACTIVE) # Test with no LB_ID mock_loadbalancer_repo_update.reset_mock() mark_lb_and_listeners_active.execute(None, []) mock_loadbalancer_repo_update.assert_not_called()", "_amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock)", "mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test", "'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None,", "mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_delete_in_db(self, mock_l7rule_repo_update,", "uuidutils from sqlalchemy.orm import exc from taskflow.types import failure from octavia.common import constants", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_ready_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "'TEST', POOL_ID, update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status':", "# Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = 
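Because the fragments overlap, some tests can be stitched back together nearly verbatim. The sketch below is one such reassembly (the amphora pending-delete test). It presumes the surrounding TestDatabaseTasks class with its class-level @mock.patch decorators and the module-level AMP_ID, _amphora_mock, repo, constants and database_tasks names; the exact indentation and decorator order are inferred rather than certain.

    # Reassembled from the overlapping fragments; lives inside the
    # TestDatabaseTasks class, whose class-level @mock.patch decorators
    # inject the mock_* arguments and make get_session() return 'TEST'.
    def test_mark_amphora_pending_delete_in_db(self,
                                               mock_generate_uuid,
                                               mock_LOG,
                                               mock_get_session,
                                               mock_loadbalancer_repo_update,
                                               mock_listener_repo_update,
                                               mock_amphora_repo_update,
                                               mock_amphora_repo_delete):

        mark_amp_pending_delete_in_db = (database_tasks.
                                         MarkAmphoraPendingDeleteInDB())

        # execute() flags the amphora as PENDING_DELETE in the repository.
        mark_amp_pending_delete_in_db.execute(_amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            AMP_ID,
            status=constants.PENDING_DELETE)

        # revert() rolls the amphora back to ERROR.
        mock_amphora_repo_update.reset_mock()
        mark_amp_pending_delete_in_db.revert(_amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            id=AMP_ID,
            status=constants.ERROR)

        # revert() must still attempt the ERROR update, and not raise,
        # when the repository write itself fails.
        mock_amphora_repo_update.reset_mock()
        mock_amphora_repo_update.side_effect = Exception('fail')
        mark_amp_pending_delete_in_db.revert(_amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            id=AMP_ID,
            status=constants.ERROR)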
The same run also carries fragments for the tasks that manage the load balancer's child objects: Delete and Update tasks plus MarkActive / MarkPendingCreate / MarkPendingUpdate / MarkPendingDelete variants for listeners, pools, members, health monitors, L7 policies and L7 rules, together with MarkLBActiveInDB (cascading to listeners, pools, members, health monitors, L7 policies and L7 rules when mark_subobjects=True), MarkLBAndListenersActiveInDB, MarkLBDeletedInDB, MarkLBPendingDeleteInDB, UpdateLoadbalancerInDB (including a VIP qos_policy_id update), UpdateVIPAfterAllocation, the amphora certificate-expiration and cert-busy updates, UpdatePoolMembersOperatingStatusInDB, GetAmphoraDetails, GetAmphoraeFromLoadbalancer, GetListenersFromLoadbalancer and GetVipFromLoadbalancer. The recurring assertions mirror the pattern above: execute() writes the PENDING_* / ACTIVE / DELETED state through the matching repository, revert() writes provisioning_status=constants.ERROR, and a revert that hits a repository exception ('fail') must swallow it while still having attempted the ERROR update. A few delete-task reverts are only TODO-commented in the fragments (pool, member, health monitor, L7 policy and L7 rule).
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks.", "# Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the", "import exc from taskflow.types import failure from octavia.common import constants from octavia.common import", "mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST',", "with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "L7RULE_ID = uuidutils.generate_uuid() VIP_IP = '192.168.127.12' VRRP_IP = '172.16.31.10' HA_IP = '192.168.3.11' AMP_ROLE", "test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock)", "License. 
You may obtain # a copy of the License at # #", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) #", "setUp(self): self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock()", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID)", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test", "provisioning_status=constants.ERROR) # Test the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "# Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST',", "mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid,", "mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect =", "with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def", "the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with", "mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None,", "= database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert", "lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = 
database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2,", "from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict = {constants.LISTENER_ID: LISTENER_ID, constants.LOADBALANCER_ID: LB_ID} mark_lb_and_listeners_active = (database_tasks.", "mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock()", "= data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2)", "mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "ANY KIND, either express or implied. See the # License for the specific", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST',", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mark_health_mon_pending_create = (database_tasks. MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert", "delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID,", "listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2,", "the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self,", "mock_amphora_repo_delete): mark_l7rule_pending_delete = (database_tasks. 
MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the", "amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect =", "= database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update')", "(database_tasks. 
MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with", "Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "= database_tasks.GetAmphoraDetails() new_amp = get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id)", "mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor = self.health_mon_mock", "def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members =", "'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update_pool_members') def test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect =", "# operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies", "mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID,", "= (database_tasks. 
MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_member_repo_update.reset_mock()", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate',", "= Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,", "@mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id = HM_ID", "mock_amphora_repo_delete): mark_member_active = (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert", "repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix #", "mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test the revert mock_l7rule_repo_delete.reset_mock()", "= Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') @mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG,", "revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert", "uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae = [amp1,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST',", "'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "repo.LoadBalancerRepository.update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) 
@mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get',", "'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID:", "'TEST', AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "[mock.call('TEST', health_monitor.id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST',", "'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update')", "amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb", "lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock()", "_amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id", "pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST',", "Not Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect = [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete')", "= Exception('fail') 
mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with(", "provisioning_status=constants.ERROR) # Test the revert mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock()", "Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count)", "Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock()", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID)", "Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail') create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP')", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) 
repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def", "= uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id =", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST', id=L7RULE_ID) # Test the revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. MarkMemberPendingUpdateInDB()) mark_member_pending_update.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID,", "'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "status=constants.AMPHORA_READY, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR,", "Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def", "mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "See the # License for the specific language governing permissions and limitations #", "mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert 
mock_member_repo_update.reset_mock() mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID,", "get_amp_details.execute(_amphora_mock) self.assertEqual(AMP_ID, new_amp.id) self.assertEqual(VRRP_IP, new_amp.vrrp_ip) self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id)", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 =", "[_amphora_mock] _l7policy_mock = mock.MagicMock() _l7policy_mock.id = L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id = L7RULE_ID", "Test the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_amphora_repo_delete): _amphora_mock.lb_network_ip = LB_NET_IP mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB() mark_amp_ready_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_READY, compute_id=COMPUTE_ID,", "= database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1, 'ip_address': '10.1.0.0'}) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') #", "# 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "id=LB_ID, provisioning_status=constants.ERROR) # Test the revert no LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called()", "exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self,", "test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id", "law or agreed to in writing, software # distributed under the License is", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. 
MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test", "@mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "Test the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with( # 'TEST', #", "# Test the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST',", "mock_amphora_repo_delete): update_listener = database_tasks.UpdateListenerInDB() listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'})", "self.assertEqual(_amphora_mock.id, amp_id) # Test the revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST',", "Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_update_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,", "cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB()", "mark_l7policy_pending_create = (database_tasks. 
MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert", "mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_vip_data2 = database_tasks.UpdateAmphoraVIPData() update_amp_vip_data2.execute(_amphorae[0]) mock_amphora_repo_update.assert_called_once_with(", "revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception", "mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def", "= (database_tasks. MarkL7RulePendingDeleteInDB()) mark_l7rule_pending_delete.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock()", "# a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "mark_amp_deleted_in_db = (database_tasks. 
MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock)", "operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.common.tls_utils.cert_parser.get_cert_expiration', return_value=_cert_mock) def test_update_amphora_db_cert_exp(self, mock_generate_uuid, mock_LOG,", "= database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) #", "= uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID =", "fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG,", "exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self,", "id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_create_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mark_busy.execute(_loadbalancer_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) def test_update_lb_server_group_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID, qos_policy_id='fool') def test_update_listener_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "= mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock", "the revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) 
@mock.patch('octavia.db.repositories.MemberRepository.update') def", "{'provisioning_status': constants.ERROR}) # Test the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID)", "mark_l7rule_pending_update = (database_tasks. MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert", "mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST',", "= (database_tasks. MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_health_mon_repo_update.reset_mock()", "mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None,", "governing permissions and limitations # under the License. # import random from cryptography", "# TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def", "# TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.delete') def", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_update_health_monitor_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ACTIVE), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST',", "mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. 
MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID,", "HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "(database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test", "MarkL7RulePendingCreateInDB()) mark_l7rule_pending_create.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert with exception mock_amphora_repo_delete.reset_mock() mock_amphora_repo_delete.side_effect = Exception('fail')", "exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.ListenerRepository.' 
'prov_status_active_if_not_error') def", "mock_hm_repo_update, mock_member_repo_update, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): unused_pool = data_models.Pool(id='unused_pool')", "listener_dict = {constants.LISTENER_ID: LISTENER_ID} update_listener.execute(listener_dict, {'name': 'test', 'description': 'test2'}) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, name='test',", "mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update,", "id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_health_mon = database_tasks.DeleteHealthMonitorInDBByPool() delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert", "Test the revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "= data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies =", "= [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb)", "_loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock() _l7policy_mock.id = L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id", "# Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp1 = mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2", "mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, 
provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING,", "mark_health_mon_pending_delete = (database_tasks. MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) #", "mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.PENDING_CREATE, cert_busy=False)", "@mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid,", "# Test the revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) #", "revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) #", "mock.MagicMock() self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self,", "lb = mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj =", "Test the revert 
mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( # 'TEST', #", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session,", "provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ERROR)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ERROR)])", "# Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete = (database_tasks. MarkMemberPendingDeleteInDB()) mark_member_pending_delete.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_DELETE) # Test", "update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock)", "provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID,", "repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update')", "mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP", "image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self, 
mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "octavia.common import constants from octavia.common import data_models from octavia.common import utils from octavia.controller.worker.v2.tasks", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_lb = database_tasks.ReloadLoadBalancer() lb = reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock,", "'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "= [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool = data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy", "mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE)", "id=L7RULE_ID) # Test the revert mock_l7rule_repo_delete.reset_mock() delete_l7rule.revert(_l7rule_mock) # TODO(sbalukoff) Fix # repo.ListenerRepository.update.assert_called_once_with( #", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid,", "@mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update =", "_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, _amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called)", "_vip_mock.subnet_id = SUBNET_ID 
_vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock() _compute_mock", "mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) #", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID)", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get',", "LB_ID, provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) #", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID,", "mock_amphora_repo_update.reset_mock() mark_amp_backup_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock)", "mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock()", "vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB() mark_amp_standalone_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\",", "mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id)", "mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_update = (database_tasks. 
MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock)", "mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "mock from oslo_db import exception as odb_exceptions from oslo_utils import uuidutils from sqlalchemy.orm", "load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test revert with", "database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID) # Test the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID})", "this file except in compliance with the License. You may obtain # a", "amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae = [amp1, amp2]", "name='test', description='test2') # Test the revert mock_listener_repo_update.reset_mock() update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) #", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member", "revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect", "revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update')", "or implied. 
See the # License for the specific language governing permissions and", "disable_amp_health = ( database_tasks.DisableLBAmphoraeHealthMonitoring()) disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid,", "'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer", "update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail')", "amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG,", "SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active", "@mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create", "mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock()", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_l7policy_repo_update.side_effect = Exception('fail') update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def 
test_update_l7rule_in_db(self, mock_l7rule_repo_update,", "result) def test_get_vip_from_loadbalancer(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): _loadbalancer_mock.vip = _vip_mock", "provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "revert mock_amphora_repo_update.reset_mock() mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB() update_members.execute(POOL_ID, constants.ONLINE) mock_member_repo_update_pool_members.assert_called_once_with( 'TEST', POOL_ID,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED)", "# Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the", "database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with(", "database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID,", "data_models.Pool(id='redirect_pool', members=members2) l7rules = [data_models.L7Rule(id='rule1')] redirect_policy = data_models.L7Policy(id='redirect_policy', redirect_pool=redirect_pool, l7rules=l7rules) l7policies = [redirect_policy]", "L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID,", "type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert mock_l7rule_repo_update.reset_mock() update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR)", "mark_health_mon_pending_delete.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_health_mon_repo_update.reset_mock() 
mock_health_mon_repo_update.side_effect", "from octavia.db import repositories as repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid()", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.DELETED) # Test the", "'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID,", "provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with(", "the revert mock_pool_repo_update.reset_mock() mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with", "self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock = mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor", "mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the revert", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db = (database_tasks. MarkAmphoraPendingUpdateInDB()) mark_amp_pending_update_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID,", "'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) # Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks. 
MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock)", "def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks.", "mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_delete_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID,", "listener2] pools = [default_pool, redirect_pool, unused_pool] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active =", "= mock.MagicMock() _compute_mock.lb_network_ip = LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor =", "def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock)", "vrrp_group_name=LB_ID.replace('-', ''), vrrp_auth_type=constants.VRRP_AUTH_DEFAULT, vrrp_auth_pass=mock_generate_uuid.return_value.replace('-', '')[0:7], advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG,", "self.l7rule_mock.id = L7RULE_ID self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create,", "create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_active = (database_tasks.MarkMemberActiveInDB()) mark_member_active.execute(self.member_mock) 
mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ACTIVE) #", "(database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock)", "the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with", "id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ERROR), mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update')", "mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "the revert repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid,", "the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect", "'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail')", "revert mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the", "_vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock", "mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test revert", "repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def 
test_mark_lb_amphorae_health_monitoring_busy( self, mock_amp_health_repo_update,", "'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_DELETE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR)", "the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None, _amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0,", "VRRP_IP _amphora_mock.ha_ip = HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE", "(database_tasks. MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB() update_members.execute(POOL_ID, constants.ONLINE) mock_member_repo_update_pool_members.assert_called_once_with( 'TEST', POOL_ID, operating_status=constants.ONLINE)", "mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update,", "mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy = database_tasks.DeleteL7PolicyInDB() delete_l7policy.execute(_l7policy_mock) repo.L7PolicyRepository.delete.assert_called_once_with( 'TEST', id=L7POLICY_ID) # Test the revert", "= LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id =", "AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=VRRP_ID) @mock.patch('octavia.db.repositories.AmphoraRepository.associate') def test_associate_failover_amphora_with_lb_id( self, mock_associate, mock_generate_uuid, mock_LOG,", "Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def 
test_mark_l7policy_active_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "(database_tasks. MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_update.revert(self.l7policy_mock)", "= database_tasks.DeleteHealthMonitorInDB() delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with(", "mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) def test_mark_LB_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG,", "mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid,", "result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners", "the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR)", "mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. 
MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the", "mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) self.assertEqual(2, repo.PoolRepository.update.call_count) repo.PoolRepository.update.has_calls( [mock.call('TEST', default_pool.id, provisioning_status=constants.ERROR), mock.call('TEST', redirect_pool.id, provisioning_status=constants.ERROR)]) self.assertEqual(1,", "revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect", "Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid,", "mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = ( database_tasks.MarkLBAmphoraeHealthBusy()) mark_busy.execute(_loadbalancer_mock)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB()) mark_l7rule_active.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "_pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self,", "= [_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result = get_amps_from_lb_obj.execute(lb) self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def", "Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "oslo_utils import uuidutils from sqlalchemy.orm import exc from taskflow.types import failure from octavia.common", "self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id", "COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id = AMP_ID _amphora_mock.compute_id = COMPUTE_ID _amphora_mock.lb_network_ip", "delete_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test Not Found Exception mock_health_mon_repo_delete.reset_mock() mock_health_mon_repo_delete.side_effect =", "mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7policy_repo_update.reset_mock() 
mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "= mock.MagicMock() _l7policy_mock.id = L7POLICY_ID _l7rule_mock = mock.MagicMock() _l7rule_mock.id = L7RULE_ID _listener_mock =", "self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "Test the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert", "mock_repos_pool_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name':", "= database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP)", "HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get,", "L7POLICY_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) #", "# Test the revert mock_member_repo_update.reset_mock() mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the", "mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock()", "mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. 
MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE) # Test the", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_active_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_active", "mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update,", "mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self,", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "LB_ID, 'fakeaz') self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id)", "mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect", "# Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2' LISTENER_ID = uuidutils.generate_uuid() POOL_ID", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the", "= Exception('fail') mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "= database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the revert 
mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock)", "self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "= database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2'}) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') #", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count)", "mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) # Test the", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST',", "# Test the revert mock_amphora_repo_update.reset_mock() mark_amp_ready_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) #", "update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, server_group_id=SERVER_GROUP_ID) # Test the revert mock_listener_repo_update.reset_mock() update_server_group_info.revert(LB_ID, SERVER_GROUP_ID)", "POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "with exception mock_listener_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') update_server_group_info.revert(LB_ID, SERVER_GROUP_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update, mock_generate_uuid,", "revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7rule = database_tasks.UpdateL7RuleInDB() update_l7rule.execute( self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value':", "self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') 
@mock.patch('octavia.db.repositories.ListenerRepository.delete') def test_delete_listener_in_db(self, mock_listener_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid,", "mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "# Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) #", "# Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test revert with exception mock_amphora_repo_update.reset_mock()", "= {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with(", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. 
[Extraction residue recovered: the span that stood here was a shuffled dump of overlapping n-gram fragments lifted from OpenStack Octavia's TestDatabaseTasks unit tests for the v2 controller-worker database tasks (octavia.controller.worker.v2.tasks.database_tasks).

The recoverable content is: an Apache License 2.0 header (copyright 2015 Hewlett-Packard Development Company, L.P.); imports of mock, oslo_utils.uuidutils, oslo_db exceptions, fernet, and the octavia constants, utils, data_models, database_tasks and repositories modules; generated UUID constants (AMP_ID, COMPUTE_ID, LB_ID, SERVER_GROUP_ID, LISTENER_ID, POOL_ID, HM_ID, MEMBER_ID, L7POLICY_ID, L7RULE_ID, VRRP/HA port IDs, among others) together with fixed test addresses (LB_NET_IP, VIP_IP, VRRP_IP, HA_IP); and a TestDatabaseTasks(base.TestCase) class whose class-level mock.patch decorators stub AmphoraRepository.delete/update, ListenerRepository.update, LoadBalancerRepository.update, db.api.get_session (returning 'TEST'), the module LOG and uuidutils.generate_uuid.

Every test in the fragments follows the same pattern: instantiate a task such as CreateAmphoraInDB, MarkAmphoraBootingInDB, MarkLBAndListenersActiveInDB, MarkMemberPendingCreateInDB, MarkPoolActiveInDB, MarkHealthMonitorActiveInDB, MarkL7RulePendingUpdateInDB, UpdateHealthMonInDB, UpdateL7PolicyInDB, MapLoadbalancerToAmphora or AssociateFailoverAmphoraWithLBID; call execute() and assert the corresponding repository update was called once with the expected provisioning/operating status; then call revert() (with and without an injected repository exception) and assert the object is set to constants.ERROR. A representative test is sketched below.]
@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_active_in_db(self, mock_health_mon_repo_update,", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_pending_delete = (database_tasks. MarkListenerPendingDeleteInDB()) mark_listener_pending_delete.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.PENDING_DELETE) #", "def test_mark_l7policy_pending_update_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks.", "mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) # Test revert with exception mock_amphora_repo_update.reset_mock()", "[mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR)", "= data_models.Listener(id='listener2', l7policies=l7policies) listener2.l7policies = l7policies listeners = [listener1, listener2] pools = [default_pool,", "vrrp_priority=None) # Test revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "= POOL_ID self.db_pool_mock.health_monitor = self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, }", "@mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete =", "MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) # Test the", "# Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "'TEST', MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID,", "mark_lb_and_listeners_active.execute(LB_ID, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with LB_ID from", "L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', 
id=L7RULE_ID, provisioning_status=constants.ERROR)", "exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self,", "vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\"))", "repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom) Fix #", "with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def", "self.assertEqual(HA_IP, new_amp.ha_ip) self.assertEqual(VRRP_PORT_ID, new_amp.vrrp_port_id) self.assertEqual(AMP_ROLE, new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_member = database_tasks.UpdateMemberInDB() update_member.execute(self.member_mock, {'weight': 1,", "'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id, provisioning_status=constants.ACTIVE), mock.call('TEST', listeners[1].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock()", "'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) # Test the revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect =", "def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID", "L.P. 
# # Licensed under the Apache License, Version 2.0 (the \"License\"); you", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active =", "mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete,", "mock.MagicMock( return_value={'id': LISTENER_ID}) _listener_mock.id = LISTENER_ID _listener_mock.to_dict = _listener_to_dict_mock _tf_failure_mock = mock.Mock(spec=failure.Failure) _vip_mock", "data_models.Pool(id='default_pool', members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}]", "= (database_tasks. MarkAmphoraAllocatedInDB()) mark_amp_allocated_in_db.execute(_amphora_mock, self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_ALLOCATED, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP, load_balancer_id=LB_ID) #", "= database_tasks.MarkLBActiveInDB() mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert", "= reload_lb.execute(LB_ID) repo.LoadBalancerRepository.get.assert_called_once_with( 'TEST', id=LB_ID) self.assertEqual(_loadbalancer_mock, lb) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_after_allocation(self, mock_vip_update,", "'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_update_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_update_in_db", "operating_status=constants.ONLINE, provisioning_status=constants.ACTIVE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) #", "mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG,", "Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID,", "VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock = mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock]", "'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def 
test_delete_member_in_db(self, mock_member_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_l7policy_repo_update.reset_mock() mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_l7policy_repo_update.reset_mock()", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_pending_delete_in_db = (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) #", "@mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7policy =", "database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST',", "database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_listener = database_tasks.DeleteListenerInDB() delete_listener.execute({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_called_once_with(", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener()", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): map_lb_to_amp = database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID,", "Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB() mark_amp_master_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY)", "revert mock_pool_repo_update.reset_mock() mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "test_update_pool_members_operating_status_in_db( self, mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB()", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_create = (database_tasks. MarkL7PolicyPendingCreateInDB()) mark_l7policy_pending_create.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', L7POLICY_ID,", "# under the License. 
# import random from cryptography import fernet import mock", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST',", "= Exception('fail') update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG,", "Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_amphora_allocated_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= mock.MagicMock() _l7rule_mock.id = L7RULE_ID _listener_mock = mock.MagicMock() _listener_to_dict_mock = mock.MagicMock( return_value={'id': LISTENER_ID})", "(database_tasks. MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID)", "mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock()", "the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks. 
MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE)", "revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert with exception", "revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def", "mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2,", "mock_amphora_repo_update, mock_amphora_repo_delete): update_amp_fo_details = database_tasks.UpdateAmpFailoverDetails() update_amp_fo_details.execute(_amphora_mock, _amphora_mock) mock_amphora_repo_update.assert_called_once_with( 'TEST', AMP_ID, vrrp_ip=VRRP_IP, ha_ip=HA_IP, vrrp_port_id=VRRP_PORT_ID,", "LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_amphora_repo_delete): mark_pool_active = (database_tasks.MarkPoolActiveInDB()) mark_pool_active.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.ACTIVE) # Test the revert", "mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch( 'octavia.db.repositories.Repositories.update_pool_and_sp') def test_update_pool_in_db(self, mock_repos_pool_update,", "mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id,", "def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB())", "MarkHealthMonitorPendingCreateInDB()) mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') 
mark_l7policy_pending_delete.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "= Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def test_mark_listener_pending_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE)", "the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with", "mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) def test_get_amphora_details(self, mock_generate_uuid, mock_LOG,", "L7RULE_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) #", "Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID,", "delete_health_mon.execute(self.db_pool_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_deleted = database_tasks.MarkLBDeletedInDB() mark_loadbalancer_deleted.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api') # Test the revert", "the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR)", "mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None} update_dict = {'name': 'test', 'description':", "vrrp_port_id=VRRP_PORT_ID, ha_port_id=HA_PORT_ID, vrrp_id=1) def test_update_amphora_vip_data2(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, 
mock_amphora_repo_delete): update_amp_vip_data2", "test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "test_mark_amphora_booting_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id)", "test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock)", "'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR)", "# Test the revert with exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST',", "update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with(", "Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_by_listener(self, mock_generate_uuid, mock_LOG, mock_get_session,", "revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_active.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update')", "mock.MagicMock() _loadbalancer_mock.id = LB_ID _loadbalancer_mock.amphorae = [_amphora_mock] _l7policy_mock = mock.MagicMock() _l7policy_mock.id = L7POLICY_ID", "constants from octavia.common import data_models from octavia.common import utils from octavia.controller.worker.v2.tasks import database_tasks", "mock_member_repo_update.side_effect = Exception('fail') mark_member_active.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_create_in_db(self, mock_member_repo_update, mock_generate_uuid,", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mock_get_session.side_effect = ['TEST', odb_exceptions.DBDuplicateEntry] create_vrrp_group = 
database_tasks.CreateVRRPGroupForLB() create_vrrp_group.execute(_loadbalancer_mock)", "mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update = (database_tasks. MarkL7RulePendingUpdateInDB()) mark_l7rule_pending_update.execute(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test", "mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect", "# License for the specific language governing permissions and limitations # under the", "# 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self, mock_l7policy_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG,", "self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "database_tasks.DeletePoolInDB() delete_pool.execute(POOL_ID) repo.PoolRepository.delete.assert_called_once_with( 'TEST', id=POOL_ID) # Test the revert mock_pool_repo_delete.reset_mock() delete_pool.revert(POOL_ID) # TODO(johnsom)", "listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active = database_tasks.MarkLBActiveInDBByListener() mark_loadbalancer_active.execute(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0,", "MarkPoolPendingUpdateInDB()) mark_pool_pending_update.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): reload_amp = database_tasks.ReloadAmphora()", "mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "test_mark_pool_pending_update_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_update = (database_tasks. 
MarkPoolPendingUpdateInDB())", "= 'FAKE_ROLE' VRRP_ID = random.randrange(255) VRRP_PRIORITY = random.randrange(100) CACHED_ZONE = 'zone1' IMAGE_ID =", "'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_loadbalancer_active", "@mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id", "provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mock_pool_repo_update.reset_mock() mock_member_repo_update.reset_mock() mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock()", "the revert mock_loadbalancer_repo_update.reset_mock() update_load_balancer.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert with", "mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid,", "repo.ListenerRepository.delete.reset_mock() delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): amp_cert_busy_to_F = database_tasks.UpdateAmphoraCertBusyToFalse() amp_cert_busy_to_F.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_repos_pool_update.side_effect = Exception('fail') update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update,", "in writing, software # distributed under the License is distributed on an \"AS", "mock_amphora_repo_update.reset_mock() failure_obj = failure.Failure.from_exception(Exception(\"TESTEXCEPT\")) mark_amp_master_indb.revert(failure_obj, 
_amphora_mock) self.assertFalse(repo.AmphoraRepository.update.called) mock_amphora_repo_update.reset_mock() mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB() mark_amp_backup_indb.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID,", "_compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class", "mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test the revert mock_member_repo_delete.reset_mock()", "Exception('fail') mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) def test_mark_amphora_deleted_in_db(self, mock_generate_uuid, mock_LOG,", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_get_amphorae_from_loadbalancer(self, mock_amphora_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "delete_pool.revert(POOL_ID) # TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete')", "lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners, pools=pools) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id, provisioning_status=constants.ACTIVE)", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None}", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete = (database_tasks. 
MarkHealthMonitorPendingDeleteInDB()) mark_health_mon_pending_delete.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST', LB_ID, qos_policy_id='fool') def test_update_listener_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT})", "'TEST', id=MEMBER_ID) # Test the revert mock_member_repo_delete.reset_mock() delete_member.revert(self.member_mock) # TODO(johnsom) Fix # repo.MemberRepository.delete.assert_called_once_with(", "= (database_tasks. MarkAmphoraPendingDeleteInDB()) mark_amp_pending_delete_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock()", "revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update')", "mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect", "mark_pool_pending_update.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect", "self.assertEqual([_amphora_mock], result) @mock.patch('octavia.db.repositories.ListenerRepository.get') def test_get_listeners_from_loadbalancer(self, mock_listener_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "= database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase() fer = fernet.Fernet(key) _pem_mock = fer.encrypt( utils.get_six_compatible_value('test_cert') )", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role='MASTER', vrrp_priority=constants.ROLE_MASTER_PRIORITY) mock_amphora_repo_update.reset_mock() mark_amp_master_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None)", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_busy = database_tasks.MarkAmphoraHealthBusy() mark_busy.execute(_amphora_mock) mock_amp_health_repo_update.assert_called_once_with( 'TEST', amphora_id=AMP_ID, busy=True) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update')", "return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def test_mark_lb_amphorae_deleted_in_db(self, mock_loadbalancer_repo_get, mock_amphora_repo_get, mock_generate_uuid, 
mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "= Exception('fail') mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_active_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG,", "mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj = database_tasks.GetAmphoraeFromLoadbalancer() result", "the Apache License, Version 2.0 (the \"License\"); you may # not use this", "# Test the revert with exception mock_l7policy_repo_update.reset_mock() mock_l7policy_repo_update.side_effect = Exception('fail') mark_l7policy_pending_create.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST',", "Exception('fail') mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session,", "compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_ready_in_db.revert(_amphora_mock)", "provisioning_status=constants.ACTIVE) # Test the revert mock_pool_repo_update.reset_mock() mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) # Test", "update_dict = {'name': 'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict)", "provisioning_status=constants.ERROR) # Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_create.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with(", "mock_hm_repo_update.reset_mock() mock_l7p_repo_update.reset_mock() mock_l7r_repo_update.reset_mock() mark_lb_active.revert(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=lb.id, provisioning_status=constants.ERROR) self.assertEqual(2, repo.ListenerRepository.update.call_count) repo.ListenerRepository.update.has_calls( [mock.call('TEST', listeners[0].id,", "[data_models.Listener(id='listener1'), data_models.Listener(id='listener2')] lb = data_models.LoadBalancer(id=LB_ID, listeners=listeners) mark_lb_active = database_tasks.MarkLBActiveInDB(mark_subobjects=True) mark_lb_active.execute(lb) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', lb.id,", "self.l7rule_mock.l7policy = self.l7policy_mock super(TestDatabaseTasks, self).setUp() @mock.patch('octavia.db.repositories.AmphoraRepository.create', return_value=_amphora_mock) def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session,", "'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) @mock.patch('octavia.db.repositories.AmphoraRepository.get') def test_update_amphora_info(self, mock_amphora_repo_get, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "[amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] 
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from unittest import mock

from oslo_db import exception as odb_exceptions
from oslo_utils import uuidutils
from sqlalchemy.orm import exc
from taskflow.types import failure

from octavia.common import constants
from octavia.common import data_models
from octavia.common import utils
from octavia.controller.worker.v2.tasks import database_tasks
from octavia.db import repositories as repo
import octavia.tests.unit.base as base

AMP_ID = uuidutils.generate_uuid()
COMPUTE_ID = uuidutils.generate_uuid()
LB_ID = uuidutils.generate_uuid()
LB_NET_IP = '192.0.2.2'
LISTENER_ID = uuidutils.generate_uuid()
POOL_ID = uuidutils.generate_uuid()
HM_ID = uuidutils.generate_uuid()
MEMBER_ID = uuidutils.generate_uuid()
PORT_ID = uuidutils.generate_uuid()
SUBNET_ID = uuidutils.generate_uuid()
VRRP_PORT_ID = uuidutils.generate_uuid()
HA_PORT_ID = uuidutils.generate_uuid()
L7POLICY_ID = uuidutils.generate_uuid()
L7RULE_ID = uuidutils.generate_uuid()
VIP_IP = '192.168.127.12'
VRRP_IP = '192.168.127.13'  # exact value not recoverable from the source
HA_IP = '192.168.127.14'  # exact value not recoverable from the source

_amphora_mock = mock.MagicMock()
_amphora_mock.id = AMP_ID
_amphora_mock.compute_id = COMPUTE_ID
_amphora_mock.lb_network_ip = LB_NET_IP
_amphora_mock.vrrp_ip = VRRP_IP
_amphora_mock.ha_ip = HA_IP
_amphora_mock.ha_port_id = HA_PORT_ID
_amphora_mock.vrrp_port_id = VRRP_PORT_ID
_amphorae = [_amphora_mock]
_loadbalancer_mock = mock.MagicMock()
_loadbalancer_mock.id = LB_ID
_loadbalancer_mock.amphorae = [_amphora_mock]
_l7policy_mock = mock.MagicMock()
_l7policy_mock.id = L7POLICY_ID
_l7rule_mock = mock.MagicMock()
_l7rule_mock.id = L7RULE_ID
_listener_mock = mock.MagicMock()
_listener_mock.id = LISTENER_ID
_listener_to_dict_mock = mock.MagicMock(
    return_value={'id': LISTENER_ID})
_listener_mock.to_dict = _listener_to_dict_mock
_tf_failure_mock = mock.Mock(spec=failure.Failure)
_vip_mock = mock.MagicMock()
_vip_mock.port_id = PORT_ID
_vip_mock.subnet_id = SUBNET_ID
_vip_mock.ip_address = VIP_IP
_vrrp_group_mock = mock.MagicMock()
_cert_mock = mock.MagicMock()
_compute_mock = mock.MagicMock()
_compute_mock.lb_network_ip = LB_NET_IP


@mock.patch('octavia.db.repositories.AmphoraRepository.delete')
@mock.patch('octavia.db.repositories.AmphoraRepository.update')
@mock.patch('octavia.db.repositories.ListenerRepository.update')
@mock.patch('octavia.db.repositories.LoadBalancerRepository.update')
@mock.patch('octavia.db.api.get_session', return_value='TEST')
@mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG')
@mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID)
class TestDatabaseTasks(base.TestCase):

    def setUp(self):
        self.health_mon_mock = mock.MagicMock()
        self.health_mon_mock.id = HM_ID
        self.health_mon_mock.pool_id = POOL_ID

        self.listener_mock = mock.MagicMock()
        self.listener_mock.id = LISTENER_ID

        self.loadbalancer_mock = mock.MagicMock()
        self.loadbalancer_mock.id = LB_ID

        self.member_mock = mock.MagicMock()
        self.member_mock.id = MEMBER_ID

        self.db_pool_mock = mock.MagicMock()
        self.db_pool_mock.id = POOL_ID
        self.db_pool_mock.health_monitor = self.health_mon_mock

        self.member_mock = {
            constants.MEMBER_ID: MEMBER_ID,
            constants.POOL_ID: POOL_ID,
        }

        self.l7policy_mock = mock.MagicMock()
        self.l7policy_mock.id = L7POLICY_ID

        self.l7rule_mock = mock.MagicMock()
        self.l7rule_mock.id = L7RULE_ID
        self.l7rule_mock.l7policy = self.l7policy_mock

        super(TestDatabaseTasks, self).setUp()

    @mock.patch('octavia.db.repositories.AmphoraRepository.create',
                return_value=_amphora_mock)
    def test_create_amphora_in_db(
            self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session,
            mock_loadbalancer_repo_update, mock_listener_repo_update,
            mock_amphora_repo_update, mock_amphora_repo_delete):

        create_amp_in_db = database_tasks.CreateAmphoraInDB()
        amp_id = create_amp_in_db.execute()

        repo.AmphoraRepository.create.assert_called_once_with(
            'TEST',
            id=AMP_ID,
            status=constants.PENDING_CREATE,
            cert_busy=False)
        self.assertEqual(_amphora_mock.id, amp_id)

        # Test the revert
        create_amp_in_db.revert(_tf_failure_mock)
        self.assertFalse(mock_amphora_repo_delete.called)

        mock_amphora_repo_delete.reset_mock()
        create_amp_in_db.revert(result='AMP')
        self.assertTrue(mock_amphora_repo_delete.called)
        mock_amphora_repo_delete.assert_called_once_with(
            'TEST', id='AMP')

        # Test revert with exception
        mock_amphora_repo_delete.reset_mock()
        mock_amphora_repo_delete.side_effect = Exception('fail')
        create_amp_in_db.revert(result='AMP')
        self.assertTrue(mock_amphora_repo_delete.called)
        mock_amphora_repo_delete.assert_called_once_with(
            'TEST', id='AMP')

    def test_mark_amphora_ready_in_db(
            self, mock_generate_uuid, mock_LOG, mock_get_session,
            mock_loadbalancer_repo_update, mock_listener_repo_update,
            mock_amphora_repo_update, mock_amphora_repo_delete):

        _amphora_mock.lb_network_ip = LB_NET_IP

        mark_amp_ready_in_db = database_tasks.MarkAmphoraReadyInDB()
        mark_amp_ready_in_db.execute(_amphora_mock)

        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            AMP_ID,
            status=constants.AMPHORA_READY,
            compute_id=COMPUTE_ID,
            lb_network_ip=LB_NET_IP)

        # Test the revert
        mock_amphora_repo_update.reset_mock()
        mark_amp_ready_in_db.revert(_amphora_mock)

        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            AMP_ID,
            status=constants.ERROR,
            compute_id=COMPUTE_ID,
            lb_network_ip=LB_NET_IP)

        # Test the revert with exception
        mock_amphora_repo_update.reset_mock()
        mock_amphora_repo_update.side_effect = Exception('fail')
        mark_amp_ready_in_db.revert(_amphora_mock)

        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST',
            AMP_ID,
            status=constants.ERROR,
            compute_id=COMPUTE_ID,
            lb_network_ip=LB_NET_IP)
    def test_mark_amphora_role_indb(
            self, mock_generate_uuid, mock_LOG, mock_get_session,
            mock_loadbalancer_repo_update, mock_listener_repo_update,
            mock_amphora_repo_update, mock_amphora_repo_delete):

        mark_amp_master_indb = database_tasks.MarkAmphoraMasterInDB()
        mark_amp_master_indb.execute(_amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST', AMP_ID, role='MASTER',
            vrrp_priority=constants.ROLE_MASTER_PRIORITY)

        mock_amphora_repo_update.reset_mock()
        mark_amp_master_indb.revert("BADRESULT", _amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST', AMP_ID, role=None, vrrp_priority=None)

        # A revert triggered by a taskflow failure object must not touch the
        # database again.
        mock_amphora_repo_update.reset_mock()
        failure_obj = failure.Failure.from_exception(Exception("TESTEXCEPT"))
        mark_amp_master_indb.revert(failure_obj, _amphora_mock)
        self.assertFalse(repo.AmphoraRepository.update.called)

        mock_amphora_repo_update.reset_mock()
        mark_amp_backup_indb = database_tasks.MarkAmphoraBackupInDB()
        mark_amp_backup_indb.execute(_amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST', AMP_ID, role='BACKUP',
            vrrp_priority=constants.ROLE_BACKUP_PRIORITY)

        mock_amphora_repo_update.reset_mock()
        mark_amp_backup_indb.revert("BADRESULT", _amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST', AMP_ID, role=None, vrrp_priority=None)

        mock_amphora_repo_update.reset_mock()
        mark_amp_standalone_indb = database_tasks.MarkAmphoraStandAloneInDB()
        mark_amp_standalone_indb.execute(_amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST', AMP_ID, role='STANDALONE', vrrp_priority=None)

        mock_amphora_repo_update.reset_mock()
        mark_amp_standalone_indb.revert("BADRESULT", _amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST', AMP_ID, role=None, vrrp_priority=None)

        # Test revert with exception
        mock_amphora_repo_update.reset_mock()
        mock_amphora_repo_update.side_effect = Exception('fail')
        mark_amp_standalone_indb.revert("BADRESULT", _amphora_mock)
        repo.AmphoraRepository.update.assert_called_once_with(
            'TEST', AMP_ID, role=None, vrrp_priority=None)

    @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update')
    def test_mark_amphora_health_monitoring_busy(
            self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_busy = database_tasks.MarkAmphoraHealthBusy()
        mark_busy.execute(_amphora_mock)

        mock_amp_health_repo_update.assert_called_once_with(
            'TEST', amphora_id=AMP_ID, busy=True)

    @mock.patch('octavia.db.repositories.ListenerRepository.get')
    def test_get_listeners_from_loadbalancer(
            self, mock_listener_get, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mock_listener_get.return_value = _listener_mock
        _loadbalancer_mock.listeners = [_listener_mock]

        get_list_from_lb_obj = database_tasks.GetListenersFromLoadbalancer()
        result = get_list_from_lb_obj.execute(_loadbalancer_mock)

        mock_listener_get.assert_called_once_with('TEST', id=_listener_mock.id)
        self.assertEqual([{constants.LISTENER_ID: LISTENER_ID}], result)

    @mock.patch('octavia.db.repositories.LoadBalancerRepository.get',
                return_value=_loadbalancer_mock)
    def test_reload_load_balancer(
            self, mock_lb_get, mock_generate_uuid, mock_LOG, mock_get_session,
            mock_loadbalancer_repo_update, mock_listener_repo_update,
            mock_amphora_repo_update, mock_amphora_repo_delete):

        reload_lb = database_tasks.ReloadLoadBalancer()
        lb = reload_lb.execute(LB_ID)

        repo.LoadBalancerRepository.get.assert_called_once_with(
            'TEST', id=LB_ID)
        self.assertEqual(_loadbalancer_mock, lb)

    def test_mark_LB_active_in_db(
            self, mock_generate_uuid, mock_LOG, mock_get_session,
            mock_loadbalancer_repo_update, mock_listener_repo_update,
            mock_amphora_repo_update, mock_amphora_repo_delete):

        mark_loadbalancer_active = database_tasks.MarkLBActiveInDB()
        mark_loadbalancer_active.execute(self.loadbalancer_mock)

        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST',
            LB_ID,
            provisioning_status=constants.ACTIVE)
        self.assertEqual(0, repo.ListenerRepository.update.call_count)

        # Test the revert
        mock_loadbalancer_repo_update.reset_mock()
        mark_loadbalancer_active.revert(self.loadbalancer_mock)

        repo.LoadBalancerRepository.update.assert_called_once_with(
            'TEST',
            id=LB_ID,
            provisioning_status=constants.ERROR)
        self.assertEqual(0, repo.ListenerRepository.update.call_count)

    @mock.patch('octavia.db.repositories.Repositories.update_pool_and_sp')
    def test_update_pool_in_db(
            self, mock_repos_pool_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        sp_dict = {'type': 'SOURCE_IP', 'cookie_name': None}
        update_dict = {'name': 'test', 'description': 'test2',
                       'session_persistence': sp_dict}

        update_pool = database_tasks.UpdatePoolInDB()
        update_pool.execute(POOL_ID, update_dict)

        repo.Repositories.update_pool_and_sp.assert_called_once_with(
            'TEST', POOL_ID, update_dict)

        # Test the revert
        mock_repos_pool_update.reset_mock()
        update_pool.revert(POOL_ID)

        repo.Repositories.update_pool_and_sp.assert_called_once_with(
            'TEST', POOL_ID, {'provisioning_status': constants.ERROR})

    @mock.patch('octavia.db.repositories.PoolRepository.update')
    def test_mark_pool_pending_create_in_db(
            self, mock_pool_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB())
        mark_pool_pending_create.execute(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST',
            POOL_ID,
            provisioning_status=constants.PENDING_CREATE)

        # Test the revert
        mock_pool_repo_update.reset_mock()
        mark_pool_pending_create.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST',
            id=POOL_ID,
            provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_pool_repo_update.reset_mock()
        mock_pool_repo_update.side_effect = Exception('fail')
        mark_pool_pending_create.revert(POOL_ID)

        mock_pool_repo_update.assert_called_once_with(
            'TEST',
            id=POOL_ID,
            provisioning_status=constants.ERROR)

    @mock.patch('octavia.db.repositories.L7RuleRepository.update')
    def test_mark_l7rule_active_in_db(
            self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG,
            mock_get_session, mock_loadbalancer_repo_update,
            mock_listener_repo_update, mock_amphora_repo_update,
            mock_amphora_repo_delete):

        mark_l7rule_active = (database_tasks.MarkL7RuleActiveInDB())
        mark_l7rule_active.execute(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST',
            L7RULE_ID,
            provisioning_status=constants.ACTIVE,
            operating_status=constants.ONLINE)

        # Test the revert
        mock_l7rule_repo_update.reset_mock()
        mark_l7rule_active.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST',
            id=L7RULE_ID,
            provisioning_status=constants.ERROR)

        # Test the revert with exception
        mock_l7rule_repo_update.reset_mock()
        mock_l7rule_repo_update.side_effect = Exception('fail')
        mark_l7rule_active.revert(self.l7rule_mock)

        mock_l7rule_repo_update.assert_called_once_with(
            'TEST',
            id=L7RULE_ID,
            provisioning_status=constants.ERROR)
Unless required by applicable law or agreed", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = database_tasks.MarkAmphoraDeletedInDB() mark_amp_deleted_in_db.execute(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.DELETED)", "Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert", "test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = database_tasks.DisableAmphoraHealthMonitoring() disable_amp_health.execute(_amphora_mock)", "MarkHealthMonitorPendingUpdateInDB()) mark_health_mon_pending_update.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with(", "= HA_IP _amphora_mock.ha_port_id = HA_PORT_ID _amphora_mock.vrrp_port_id = VRRP_PORT_ID _amphora_mock.role = AMP_ROLE _amphora_mock.vrrp_id =", "self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7RuleRepository.update.call_count) repo.L7RuleRepository.update.has_calls( [mock.call('TEST', l7rules[0].id, provisioning_status=constants.ACTIVE)]) mock_loadbalancer_repo_update.reset_mock()", "= database_tasks.MapLoadbalancerToAmphora() amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id =", "members=members1, health_monitor=health_monitor) listener1 = data_models.Listener(id='listener1', default_pool=default_pool) members2 = [{constants.MEMBER_ID: 'member3'}, {constants.MEMBER_ID: 'member4'}] redirect_pool", "= [exc.NoResultFound()] delete_health_mon.execute(self.health_mon_mock) repo.HealthMonitorRepository.delete.assert_called_once_with( 'TEST', id=HM_ID) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete') def test_delete_health_monitor_in_db_by_pool(self, mock_health_mon_repo_delete, mock_health_mon_repo_update, mock_generate_uuid,", "@mock.patch('octavia.db.repositories.VipRepository.update') def test_update_vip_in_db_during_update_loadbalancer(self, mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id =", "TODO(johnsom) Fix # repo.PoolRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # operating_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.delete') def test_delete_l7policy_in_db(self,", "mock.MagicMock() self.db_pool_mock.id = POOL_ID self.db_pool_mock.health_monitor = self.health_mon_mock self.member_mock = { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID:", 
"'prov_status_active_if_not_error') def test_mark_lb_and_listeners_active_in_db(self, mock_list_not_error, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict =", "disable_amp_health.execute(_loadbalancer_mock) mock_amp_health_repo_delete.assert_called_once_with( 'TEST', amphora_id=AMP_ID) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.update') def test_mark_amphora_health_monitoring_busy(self, mock_amp_health_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "amp2.id = uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock,", "self.loadbalancer_mock.id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect", "new_amp.role) self.assertEqual(VRRP_ID, new_amp.vrrp_id) self.assertEqual(VRRP_PRIORITY, new_amp.vrrp_priority) def test_mark_amphora_role_indb(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "MEMBER_ID, weight=1, ip_address='10.1.0.0') # Test the revert mock_member_repo_update.reset_mock() update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.ERROR)", "exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') mark_health_mon_pending_update.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_mark_l7policy_active_in_db(self,", "'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "@mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_delete_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_delete", "None) self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with(", "Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) # Test the revert", "mark_member_pending_delete.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "Test the revert with exception 
mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID,", "test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with(", "Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_lb_and_listeners_active.revert(LB_ID, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_deleted_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) def test_mark_amphora_pending_delete_in_db(self,", "revert with exception mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') mark_member_pending_create.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update')", "id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora_with_az(self, mock_allocate_and_associate, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "= Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "'TEST', POOL_ID, {'provisioning_status': constants.ERROR}) @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) # Test the revert mock_l7policy_repo_update.reset_mock() update_l7policy.revert(self.l7policy_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST',", "= mock.MagicMock() self.loadbalancer_mock.id = LB_ID self.member_mock = mock.MagicMock() self.member_mock.id = MEMBER_ID self.db_pool_mock =", "Test the revert with exceptions mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail')", "id=AMP_ID, status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_allocated_in_db.revert(None,", "{'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}}) 
repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, name='test', description='test2') repo.VipRepository.update.assert_called_once_with('TEST',", "repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, update_dict) # Test the revert mock_repos_pool_update.reset_mock() update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID,", "exception mock_health_mon_repo_update.reset_mock() mock_health_mon_repo_update.side_effect = Exception('fail') update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) def test_update_load_balancer_in_db(self, mock_generate_uuid,", "= mock.MagicMock() amp1.id = uuidutils.generate_uuid() amp2 = mock.MagicMock() amp2.id = uuidutils.generate_uuid() lb =", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) # Test with LB_ID from listeners mock_loadbalancer_repo_update.reset_mock() mock_list_not_error.reset_mock() listener_dict", "mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, [listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test", "self.assertEqual(1, repo.HealthMonitorRepository.update.call_count) repo.HealthMonitorRepository.update.has_calls( [mock.call('TEST', health_monitor.id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1, repo.L7PolicyRepository.update.call_count) repo.L7PolicyRepository.update.has_calls( [mock.call('TEST', l7policies[0].id, provisioning_status=constants.ACTIVE)]) self.assertEqual(1,", "mock_vip_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer =", "= mock.Mock(spec=failure.Failure) _vip_mock = mock.MagicMock() _vip_mock.port_id = PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address =", "from taskflow.types import failure from octavia.common import constants from octavia.common import data_models from", "update_vip.execute(LB_ID, _vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self,", "exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def", "import repositories as repo import octavia.tests.unit.base as base AMP_ID = uuidutils.generate_uuid() COMPUTE_ID =", "'TEST', MEMBER_ID, provisioning_status=constants.ERROR) # Test the revert mock_member_repo_update.reset_mock() mock_member_repo_update.side_effect = Exception('fail') update_member.revert(self.member_mock) repo.MemberRepository.update.assert_called_once_with(", "repo.ListenerRepository.delete.assert_called_once_with( 'TEST', id=LISTENER_ID) # Test the revert repo.ListenerRepository.delete.reset_mock() 
delete_listener.revert({constants.LISTENER_ID: LISTENER_ID}) repo.ListenerRepository.delete.assert_not_called() @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.delete')", "test_mark_l7rule_pending_create_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_create = (database_tasks. MarkL7RulePendingCreateInDB())", "Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert", "revert map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test revert with exception repo.LoadBalancerRepository.update.reset_mock()", "L7POLICY_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.L7RuleRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') def test_update_l7rule_in_db(self, mock_l7rule_repo_update, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update,", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_amp_deleted_in_db = (database_tasks. MarkLBAmphoraeDeletedInDB()) mark_amp_deleted_in_db.execute(_loadbalancer_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.DELETED)", "'TEST', AMP_ID, status=constants.PENDING_DELETE) # Test the revert mock_amphora_repo_update.reset_mock() mark_amp_pending_delete_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR)", "'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID, lb_network_ip=LB_NET_IP) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect", "id=HM_ID) # Test the revert mock_health_mon_repo_delete.reset_mock() delete_health_mon.revert(self.db_pool_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # TODO(johnsom)", "revert mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') update_listener.revert(listener_dict) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_update_member_in_db(self,", "the revert mock_l7policy_repo_update.reset_mock() mark_l7policy_pending_update.revert(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with( 'TEST', id=L7POLICY_ID, provisioning_status=constants.ERROR) # Test the revert with", "= Exception('fail') map_lb_to_amp.revert(None, self.loadbalancer_mock.id) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) @mock.patch('octavia.db.repositories.LoadBalancerRepository.get', return_value=_loadbalancer_mock) def", "mark_loadbalancer_active = database_tasks.MarkLBActiveInDB() 
mark_loadbalancer_active.execute(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the", "= fer.encrypt( utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid,", "mark_health_mon_pending_create.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_create = (database_tasks. MarkMemberPendingCreateInDB()) mark_member_pending_create.execute(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', MEMBER_ID, provisioning_status=constants.PENDING_CREATE)", "advert_int=1) create_vrrp_group.execute(_loadbalancer_mock) @mock.patch('octavia.db.repositories.AmphoraHealthRepository.delete') def test_disable_amphora_health_monitoring(self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "'test', 'description': 'test2', 'session_persistence': sp_dict} update_pool = database_tasks.UpdatePoolInDB() update_pool.execute(POOL_ID, update_dict) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_lb_update, mock_listener_update, mock_amphora_update, mock_amphora_delete): self.loadbalancer_mock.vip.load_balancer_id = LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB()", "mock_pool_repo_update.side_effect = Exception('fail') mark_pool_active.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_create_in_db(self, mock_pool_repo_update, mock_generate_uuid,", "members1 = [{constants.MEMBER_ID: 'member1'}, {constants.MEMBER_ID: 'member2'}] health_monitor = data_models.HealthMonitor(id='hm1') default_pool = data_models.Pool(id='default_pool', members=members1,", "the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_pending_create.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with", "repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_busy=False) def test_mark_LB_active_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete):", "self.l7rule_mock, {'type': constants.L7RULE_TYPE_PATH, 'compare_type': constants.L7RULE_COMPARE_TYPE_STARTS_WITH, 'value': '/api'}) repo.L7RuleRepository.update.assert_called_once_with( 'TEST', L7RULE_ID, type=constants.L7RULE_TYPE_PATH, compare_type=constants.L7RULE_COMPARE_TYPE_STARTS_WITH, value='/api')", "lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE, 
image_id=IMAGE_ID, compute_flavor=COMPUTE_FLAVOR) repo.AmphoraRepository.get.assert_called_once_with( 'TEST', id=AMP_ID) def test_mark_listener_deleted_in_db(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "mock_l7rule_repo_update.assert_called_once_with( 'TEST', L7RULE_ID, provisioning_status=constants.ACTIVE, operating_status=constants.ONLINE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_active.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks. MarkMemberPendingUpdateInDB())", "exc from taskflow.types import failure from octavia.common import constants from octavia.common import data_models", "uuidutils.generate_uuid() COMPUTE_ID = uuidutils.generate_uuid() LB_ID = uuidutils.generate_uuid() SERVER_GROUP_ID = uuidutils.generate_uuid() LB_NET_IP = '192.0.2.2'", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listener_dict = {'loadbalancer_id': LB_ID} mark_loadbalancer_active =", "added # repo.HealthMonitorRepository.update.assert_called_once_with( # 'TEST', # POOL_ID, # provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.delete') def test_delete_member_in_db(self, mock_member_repo_delete,", "map_lb_to_amp.execute(self.loadbalancer_mock.id) repo.AmphoraRepository.allocate_and_associate.assert_called_once_with( 'TEST', LB_ID, None) self.assertEqual(_amphora_mock.id, amp_id) amp_id = map_lb_to_amp.execute(self.loadbalancer_mock.id) self.assertIsNone(amp_id) # Test", "the revert create_amp_in_db.revert(_tf_failure_mock) self.assertFalse(mock_amphora_repo_delete.called) mock_amphora_repo_delete.reset_mock() create_amp_in_db.revert(result='AMP') self.assertTrue(mock_amphora_repo_delete.called) mock_amphora_repo_delete.assert_called_once_with( 'TEST', id='AMP') # Test revert", "mark_member_pending_update.revert(self.member_mock) mock_member_repo_update.assert_called_once_with( 'TEST', id=MEMBER_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_active_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update,", "CACHED_ZONE = 'zone1' IMAGE_ID = uuidutils.generate_uuid() COMPUTE_FLAVOR = uuidutils.generate_uuid() _amphora_mock = mock.MagicMock() _amphora_mock.id", "utils.get_six_compatible_value('test_cert') ) update_amp_cert.execute(_amphora_mock.id, _pem_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, cert_expiration=_cert_mock) def test_update_amphora_cert_busy_to_false(self, mock_generate_uuid, mock_LOG, mock_get_session,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_l7rule = database_tasks.DeleteL7RuleInDB() delete_l7rule.execute(_l7rule_mock) repo.L7RuleRepository.delete.assert_called_once_with( 'TEST',", "_vip_mock) self.assertEqual(_loadbalancer_mock, loadbalancer) mock_vip_update.assert_called_once_with('TEST', LB_ID, port_id=PORT_ID, subnet_id=SUBNET_ID, ip_address=VIP_IP) 
mock_loadbalancer_get.assert_called_once_with('TEST', id=LB_ID) def test_update_amphora_vip_data(self, mock_generate_uuid,", "amphora_id=AMP_ID) # Test revert assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) # Test revert with exception", "# Test the revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_pending_delete.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "= LB_ID update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test', 'description': 'test2', 'vip': {'qos_policy_id': 'fool'}})", "mark_pool_pending_create = (database_tasks.MarkPoolPendingCreateInDB()) mark_pool_pending_create.execute(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', POOL_ID, provisioning_status=constants.PENDING_CREATE) # Test the revert mock_pool_repo_update.reset_mock()", "mark_amp_booting_in_db = database_tasks.MarkAmphoraBootingInDB() mark_amp_booting_in_db.execute(_amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.AMPHORA_BOOTING, compute_id=COMPUTE_ID) # Test the", "PORT_ID _vip_mock.subnet_id = SUBNET_ID _vip_mock.ip_address = VIP_IP _vrrp_group_mock = mock.MagicMock() _cert_mock = mock.MagicMock()", "provisioning_status=constants.ERROR) # Test the revert with exception mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_create.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with(", "mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB() amp_id = create_amp_in_db.execute() repo.AmphoraRepository.create.assert_called_once_with( 'TEST',", "test_update_l7policy_in_db(self, mock_l7policy_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock,", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete, mock_get_cert_exp): update_amp_cert = database_tasks.UpdateAmphoraDBCertExpiration() key = utils.get_six_compatible_server_certs_key_passphrase()", "LB_ID mock_loadbalancer_repo_update.reset_mock() mock_listener_repo_update.reset_mock() mark_lb_and_listeners_active.revert(None, []) mock_loadbalancer_repo_update.assert_not_called() mock_listener_repo_update.assert_not_called() # Test the revert with exceptions", "the revert mock_health_mon_repo_update.reset_mock() update_health_mon.revert(self.health_mon_mock) repo.HealthMonitorRepository.update.assert_called_once_with( 'TEST', HM_ID, provisioning_status=constants.ERROR) # Test the revert with", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7policy_pending_update = (database_tasks. 
MarkL7PolicyPendingUpdateInDB()) mark_l7policy_pending_update.execute(self.l7policy_mock) mock_l7policy_repo_update.assert_called_once_with(", "@mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_l7rule_pending_update_in_db(self, mock_l7rule_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_l7rule_pending_update =", "'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update,", "mock_amphora_repo_update.reset_mock() mark_amp_booting_in_db.revert(None, _amphora_mock.id, _amphora_mock.compute_id) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, status=constants.ERROR, compute_id=COMPUTE_ID) # Test the revert", "def test_create_amphora_in_db(self, mock_create, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): create_amp_in_db = database_tasks.CreateAmphoraInDB()", "def test_mark_member_pending_update_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_update = (database_tasks.", "# repo.MemberRepository.delete.assert_called_once_with( # 'TEST', # MEMBER_ID) @mock.patch('octavia.db.repositories.PoolRepository.delete') def test_delete_pool_in_db(self, mock_pool_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session,", "AMP_ROLE _amphora_mock.vrrp_id = VRRP_ID _amphora_mock.vrrp_priority = VRRP_PRIORITY _amphorae = [_amphora_mock] _loadbalancer_mock = mock.MagicMock()", "Exception('fail') mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') def test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG,", "= IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update') @mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid',", "mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): delete_member = database_tasks.DeleteMemberInDB() delete_member.execute(self.member_mock) repo.MemberRepository.delete.assert_called_once_with( 'TEST', id=MEMBER_ID) # Test", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_active = (database_tasks.MarkHealthMonitorActiveInDB()) mark_health_mon_active.execute(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST',", "@mock.patch('octavia.db.repositories.AmphoraRepository.update') 
@mock.patch('octavia.db.repositories.ListenerRepository.update') @mock.patch('octavia.db.repositories.LoadBalancerRepository.update') @mock.patch('octavia.db.api.get_session', return_value='TEST') @mock.patch('octavia.controller.worker.v2.tasks.database_tasks.LOG') @mock.patch('oslo_utils.uuidutils.generate_uuid', return_value=AMP_ID) class TestDatabaseTasks(base.TestCase): def setUp(self): self.health_mon_mock", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.MemberRepository.update') def test_mark_member_pending_delete_in_db(self, mock_member_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_member_pending_delete", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_vip = database_tasks.UpdateVIPAfterAllocation() loadbalancer = update_vip.execute(LB_ID, _vip_mock)", "mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(listener_dict) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0, repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self,", "get_vip_from_lb_obj = database_tasks.GetVipFromLoadbalancer() result = get_vip_from_lb_obj.execute(_loadbalancer_mock) self.assertEqual(_vip_mock, result) @mock.patch('octavia.db.repositories.VRRPGroupRepository.create') def test_create_vrrp_group_for_lb(self, mock_vrrp_group_create, mock_generate_uuid,", "self.health_mon_mock = mock.MagicMock() self.health_mon_mock.id = HM_ID self.health_mon_mock.pool_id = POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id", "octavia.controller.worker.v2.tasks import database_tasks from octavia.db import repositories as repo import octavia.tests.unit.base as base", "revert with exception mock_loadbalancer_repo_update.reset_mock() mock_loadbalancer_repo_update.side_effect = Exception('fail') mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) self.assertEqual(0,", "= { constants.MEMBER_ID: MEMBER_ID, constants.POOL_ID: POOL_ID, } self.l7policy_mock = mock.MagicMock() self.l7policy_mock.id = L7POLICY_ID", "status=constants.ERROR) # Test the revert with exception mock_amphora_repo_update.reset_mock() mock_amphora_repo_update.side_effect = Exception('fail') mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with(", "mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test the revert with exception mock_listener_repo_update.reset_mock()", "mock.call('TEST', listeners[1].id, provisioning_status=constants.ERROR)]) @mock.patch('octavia.db.repositories.PoolRepository.update') @mock.patch('octavia.db.repositories.MemberRepository.update') @mock.patch('octavia.db.repositories.HealthMonitorRepository.update') @mock.patch('octavia.db.repositories.L7PolicyRepository.update') @mock.patch('octavia.db.repositories.L7RuleRepository.update') def test_mark_LB_active_in_db_full_graph(self, mock_l7r_repo_update, mock_l7p_repo_update, 
mock_hm_repo_update,", "mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_load_balancer = database_tasks.UpdateLoadbalancerInDB() update_load_balancer.execute(self.loadbalancer_mock, {'name': 'test',", "[listener_dict]) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test the revert", "the revert mock_health_mon_repo_update.reset_mock() mark_health_mon_active.revert(self.health_mon_mock) mock_health_mon_repo_update.assert_called_once_with( 'TEST', id=HM_ID, provisioning_status=constants.ERROR) # Test the revert with", "test_mark_health_mon_pending_create_in_db( self, mock_health_mon_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_health_mon_pending_create = (database_tasks.", "uuidutils.generate_uuid() lb = mock.MagicMock() lb.amphorae = [amp1, amp2] mock_amphora_get.side_effect = [_amphora_mock, None] get_amps_from_lb_obj", "provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_delete_in_db(self, mock_pool_repo_update, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): mark_pool_pending_delete", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): assoc_fo_amp_lb_id = database_tasks.AssociateFailoverAmphoraWithLBID() assoc_fo_amp_lb_id.execute(AMP_ID, LB_ID) mock_associate.assert_called_once_with('TEST', load_balancer_id=LB_ID, amphora_id=AMP_ID) # Test", "mock_amphora_repo_delete): mark_listener_deleted = database_tasks.MarkListenerDeletedInDB() mark_listener_deleted.execute(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert", "repo.ListenerRepository.update.call_count) def test_mark_LB_active_in_db_and_listeners(self, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): listeners = [data_models.Listener(id='listener1'),", "provisioning_status=constants.PENDING_DELETE) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_pending_delete.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) # Test", "mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_amphora_info = database_tasks.UpdateAmphoraInfo() update_amphora_info.execute(AMP_ID, _compute_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, lb_network_ip=LB_NET_IP, cached_zone=CACHED_ZONE,", "mock_member_repo_update_pool_members, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_members = database_tasks.UpdatePoolMembersOperatingStatusInDB() update_members.execute(POOL_ID, constants.ONLINE)", "revert with exception mock_repos_pool_update.reset_mock() mock_repos_pool_update.side_effect = Exception('fail') 
update_pool.revert(POOL_ID) repo.Repositories.update_pool_and_sp.assert_called_once_with( 'TEST', POOL_ID, {'provisioning_status': constants.ERROR})", "POOL_ID self.listener_mock = mock.MagicMock() self.listener_mock.id = LISTENER_ID self.loadbalancer_mock = mock.MagicMock() self.loadbalancer_mock.id = LB_ID", "mock_amphora_repo_update.side_effect = Exception('fail') assoc_fo_amp_lb_id.revert(AMP_ID) mock_amphora_repo_update.assert_called_once_with('TEST', AMP_ID, loadbalancer_id=None) @mock.patch('octavia.db.repositories.AmphoraRepository.' 'allocate_and_associate', side_effect=[_amphora_mock, None]) def test_map_loadbalancer_to_amphora(self,", "provisioning_status=constants.ERROR) # Test the revert mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') update_l7rule.revert(self.l7rule_mock) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID,", "mock_amphora_repo_update, mock_amphora_repo_delete): update_l7policy = database_tasks.UpdateL7PolicyInDB() update_l7policy.execute(self.l7policy_mock, {'action': constants.L7POLICY_ACTION_REJECT}) repo.L7PolicyRepository.update.assert_called_once_with( 'TEST', L7POLICY_ID, action=constants.L7POLICY_ACTION_REJECT) #", "mark_lb_and_listeners_active = (database_tasks. MarkLBAndListenersActiveInDB()) mark_lb_and_listeners_active.execute(None, [listener_dict]) mock_list_not_error.assert_called_once_with('TEST', LISTENER_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) #", "role='STANDALONE', vrrp_priority=None) mock_amphora_repo_update.reset_mock() mark_amp_standalone_indb.revert(\"BADRESULT\", _amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', AMP_ID, role=None, vrrp_priority=None) # Test revert", "L7RULE_ID, provisioning_status=constants.PENDING_UPDATE) # Test the revert mock_l7rule_repo_update.reset_mock() mark_l7rule_pending_update.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST', id=L7RULE_ID, provisioning_status=constants.ERROR) #", "revert with exception mock_listener_repo_update.reset_mock() mock_listener_repo_update.side_effect = Exception('fail') mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID, provisioning_status=constants.ERROR) def", "# Test the revert with exception mock_l7rule_repo_update.reset_mock() mock_l7rule_repo_update.side_effect = Exception('fail') mark_l7rule_pending_delete.revert(self.l7rule_mock) mock_l7rule_repo_update.assert_called_once_with( 'TEST',", "mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): update_server_group_info = database_tasks.UpdateLBServerGroupInDB() update_server_group_info.execute(LB_ID, SERVER_GROUP_ID) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST',", "revert mock_amphora_repo_update.reset_mock() mark_amp_pending_update_in_db.revert(_amphora_mock) repo.AmphoraRepository.update.assert_called_once_with( 'TEST', id=AMP_ID, status=constants.ERROR) # Test the revert with exception", "= LB_NET_IP _compute_mock.cached_zone = CACHED_ZONE _compute_mock.image_id = IMAGE_ID _compute_mock.compute_flavor = COMPUTE_FLAVOR @mock.patch('octavia.db.repositories.AmphoraRepository.delete') @mock.patch('octavia.db.repositories.AmphoraRepository.update')", "# 'TEST', # LISTENER_ID, # operating_status=constants.ERROR) 
@mock.patch('octavia.db.repositories.AmphoraRepository.get', return_value=_amphora_mock) def test_reload_amphora(self, mock_amp_get, mock_generate_uuid, mock_LOG,", "repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', LB_ID, provisioning_status=constants.ACTIVE) self.assertEqual(0, repo.ListenerRepository.update.call_count) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_active.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with(", "provisioning_status=constants.DELETED) # Test the revert mock_loadbalancer_repo_update.reset_mock() mark_loadbalancer_deleted.revert(self.loadbalancer_mock) repo.LoadBalancerRepository.update.assert_called_once_with( 'TEST', id=LB_ID, provisioning_status=constants.ERROR) # Test", "mock_pool_repo_update.reset_mock() mock_pool_repo_update.side_effect = Exception('fail') mark_pool_pending_delete.revert(POOL_ID) mock_pool_repo_update.assert_called_once_with( 'TEST', id=POOL_ID, provisioning_status=constants.ERROR) @mock.patch('octavia.db.repositories.PoolRepository.update') def test_mark_pool_pending_update_in_db(self, mock_pool_repo_update,", "test_disable_lb_amphorae_health_monitoring( self, mock_amp_health_repo_delete, mock_generate_uuid, mock_LOG, mock_get_session, mock_loadbalancer_repo_update, mock_listener_repo_update, mock_amphora_repo_update, mock_amphora_repo_delete): disable_amp_health = (", "repo.ListenerRepository.update.assert_called_once_with( 'TEST', LISTENER_ID, provisioning_status=constants.DELETED) # Test the revert mock_listener_repo_update.reset_mock() mark_listener_deleted.revert(self.listener_mock) repo.ListenerRepository.update.assert_called_once_with( 'TEST', id=LISTENER_ID," ]
[ "'url': 'architect'} ] capitales_du_monde = [ {'id': '183', 'nom': 'akmeneje'},#Akmenė {'id': '184', 'nom':", "if number_of_pages > 1: for i in range(2, number_of_pages + 1): url_of_one_page_of_results =", "content of html_doc soup = BeautifulSoup(html.content, 'html.parser') if soup.find('a', {'itemprop': 'email'}) is not", "Parse the content of html_doc soup_result = BeautifulSoup(html_result.content, 'html.parser') if soup_result.find('a', {'itemprop': 'email'})", "\" \\ \"`email`) VALUE (%s, %s, %s)\" cursor.execute(sql, ( activite.get('id'), capitale.get('id'), email)) connection.commit()", "#{'id': '5', 'url': 'hotel'}, #{'id': '6', 'url': 'landlord'}, #{'id': '7', 'url': 'cleaning'}, #{'id':", "{'class': 'company_list'}) is not None: for result_item in soup_of_one_page_of_results \\ .find('div', {'class': 'company_list'})", "content of a page from the url html = requests.get(url) # Parse the", "soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None: for result_item in soup_of_one_page_of_results\\ .find('div', {'class': 'company_list'})\\", "{'id': '203', 'nom': 'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai {'id':", "record is stored : \" + email) connection.close() except: print(str(i_1) + \" The", "Exception as e: print(\"There is an error connection at url : \" +", "not None: print(url_search) for result_item in soup_search \\ .find('div', {'class': 'company_list'}) \\ .find_all('div',", "activites = [ # {'id': '1', 'url': 'labour'} #{'id': '2', 'url': 'real+estate'}, #{'id':", "{'class': 'item'}): i_1 += 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') # Request the", "'url': 'software'}, #{'id': '5', 'url': 'hotel'}, #{'id': '6', 'url': 'landlord'}, #{'id': '7', 'url':", "#{'id': '34', 'url': 'architect'} ] capitales_du_monde = [ {'id': '183', 'nom': 'akmeneje'},#Akmenė {'id':", "no email business') else: print('sorry there is nothing') if number_of_pages > 1: for", "email = \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1) + ' email : '", "soup_of_one_page_of_results \\ .find('div', {'class': 'company_list'}) \\ .find_all('div', {'class': 'item'}): i_1 += 1 url_result", "{'id': '216', 'nom': 'raseiniuose'},#Raseiniai {'id': '217', 'nom': 'rietave'},#Rietavas {'id': '218', 'nom': 'rokiskyje'},#Rokiškis {'id':", "+ str(number_of_pages)) elif int(str(number_of_pages_with_coma).split(\".\")[1][:1]) >= 5: number_of_pages += round(number_of_pages_with_coma) print('number_of_pages : ' +", "= \"hotel\" city = \"vilniuje\" url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" +", "{'itemprop': 'email'}).text.split(\"@\")[1] print('email : ' + email) else: print('no email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self):", "\"`id_capitale_du_monde`, \" \\ \"`email`) VALUE (%s, %s, %s)\" cursor.execute(sql, ( activite.get('id'), capitale.get('id'), email))", "'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id': '192', 'nom': 'joniskyje'},#Joniškis", "'nom': 'kaisiadoryse'},#Kaišiadorys {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom': 'kaune'},#Kaunas {'id': '197', 'nom':", "a page from the url html = requests.get(url) # Parse the content of", "{'id': '192', 'nom': 'joniskyje'},#Joniškis {'id': '193', 'nom': 'jurbarke'},#Jurbarkas {'id': '194', 'nom': 
"""Unit tests that mine business e-mail addresses from the Lithuanian directory
visalietuva.lt: a single company page, a paginated search for one activity in
one city, and a sweep over all configured activities and cities that stores the
addresses in a local MySQL database."""
import time
from bs4 import BeautifulSoup
import requests
import pymysql.cursors
import unittest


class UnitTestsDataMinerYellowPagesLithuania(unittest.TestCase):

    def test_extract_one_email(self):
        url = "https://www.visalietuva.lt/en/company/astorija-hotel-uab"
        # Request the content of a page from the url
        html = requests.get(url)
        # Parse the content of html_doc
        soup = BeautifulSoup(html.content, 'html.parser')
        if soup.find('a', {'itemprop': 'email'}) is not None:
            email = "info@" + soup.find('a', {'itemprop': 'email'}).text.split("@")[1]
            print('email : ' + email)
        else:
            print('no email business')
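
    # Illustrative sketch, not part of the original tests: the methods in this
    # class rebuild a generic "info@<domain>" address from the text of the
    # <a itemprop="email"> link, discarding the real local part. A helper that
    # isolates that rule could look like this (the name guess_info_email is
    # ours; the selector is the one used by the tests).
    @staticmethod
    def guess_info_email(soup):
        """Return 'info@<domain>' for the first itemprop=email link, else None."""
        link = soup.find('a', {'itemprop': 'email'})
        if link is None:
            return None
        return "info@" + link.text.split("@")[1]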

    def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self):
        activity = "hotel"
        city = "vilniuje"
        url_search = "https://www.visalietuva.lt/en/search/" + activity + "/" + city
        html_search = requests.get(url_search)
        soup_search = BeautifulSoup(html_search.content, 'html.parser')
        number_of_pages = 0
        if soup_search.find('div', {'class': 'search_count f_left'}) is not None:
            number_of_pages_with_coma = int(soup_search
                                            .find('div', {'class': 'search_count f_left'})
                                            .find('span').text
                                            ) / 20
            if int(str(number_of_pages_with_coma).split(".")[1][:1]) < 5:
                number_of_pages += round(number_of_pages_with_coma) + 1
                print('number_of_pages : ' + str(number_of_pages))
            elif int(str(number_of_pages_with_coma).split(".")[1][:1]) >= 5:
                number_of_pages += round(number_of_pages_with_coma)
                print('number_of_pages : ' + str(number_of_pages))
        i_1 = 0
        if soup_search.find('div', {'class': 'company_list'}) is not None:
            print(url_search)
            for result_item in soup_search \
                    .find('div', {'class': 'company_list'}) \
                    .find_all('div', {'class': 'item'}):
                i_1 += 1
                url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')
                # Request the content of a page from the url
                html_result = requests.get(url_result)
                # Parse the content of html_doc
                soup_result = BeautifulSoup(html_result.content, 'html.parser')
                if soup_result.find('a', {'itemprop': 'email'}) is not None:
                    email = "info@" + soup_result.find('a', {'itemprop': 'email'}).text.split("@")[1]
                    print(str(i_1) + ' email : ' + email)
                else:
                    print(str(i_1) + ' no email business')
        else:
            print('sorry there is nothing')
        if number_of_pages > 1:
            for i in range(2, number_of_pages + 1):
                url_of_one_page_of_results = url_search + "/" + str(i)
                print(url_of_one_page_of_results)
                time.sleep(2)
                html_of_one_page_of_results = requests.get(url_of_one_page_of_results)
                soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content, 'html.parser')
                if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None:
                    for result_item in soup_of_one_page_of_results \
                            .find('div', {'class': 'company_list'}) \
                            .find_all('div', {'class': 'item'}):
                        i_1 += 1
                        url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')
                        # Request the content of a page from the url
                        html_result = requests.get(url_result)
                        # Parse the content of html_doc
                        soup_result = BeautifulSoup(html_result.content, 'html.parser')
                        if soup_result.find('a', {'itemprop': 'email'}) is not None:
                            email = "info@" + soup_result.find('a', {'itemprop': 'email'}).text.split("@")[1]
                            print(str(i_1) + ' email : ' + email)
                        else:
                            print(str(i_1) + ' no email business')
                else:
                    print('sorry there is nothing')
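
    # Illustrative sketch, not part of the original tests: the pagination code
    # above divides the result counter by the 20 results shown per page and
    # rounds by looking at the first decimal digit of the quotient's string
    # form. That approximates a ceiling division but overshoots by one page
    # when the count is an exact multiple of 20 (40 results give 3 pages).
    # An integer ceiling division states the intent directly; the helper name
    # number_of_result_pages is ours.
    @staticmethod
    def number_of_result_pages(result_count, results_per_page=20):
        """Smallest number of pages that can hold result_count items."""
        return (result_count + results_per_page - 1) // results_per_page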

    def test_extract_emails_from_all_page_of_results_for_all_activities_and_capitals(self):
        # Every activity entry below is commented out, so this sweep iterates
        # over an empty list until at least one of them is re-enabled.
        activites = [
            # {'id': '1', 'url': 'labour'}
            # {'id': '2', 'url': 'real+estate'},
            # {'id': '3', 'url': 'recruitment'},
            # {'id': '4', 'url': 'software'},
            # {'id': '5', 'url': 'hotel'},
            # {'id': '6', 'url': 'landlord'},
            # {'id': '7', 'url': 'cleaning'},
            # {'id': '8', 'url': 'association'},
            # {'id': '9', 'url': 'financial'},
            # {'id': '10', 'url': 'restaurant'},
            # {'id': '11', 'url': 'building'},
            # {'id': '12', 'url': 'hairdresser'},
            # {'id': '13', 'url': 'florist'},
            # {'id': '14', 'url': 'locksmith'},
            # {'id': '15', 'url': 'bakery'},
            # {'id': '16', 'url': 'insurance'},
            # {'id': '17', 'url': 'pharmacy'},
            # {'id': '18', 'url': 'moving'},
            # {'id': '19', 'url': 'electricity'},
            # {'id': '20', 'url': 'plumbing'},
            # {'id': '21', 'url': 'security'},
            # {'id': '22', 'url': 'lawyer'},
            # {'id': '23', 'url': 'bank'},
            # {'id': '24', 'url': 'garage'},
            # {'id': '25', 'url': 'dentist'},
            # {'id': '26', 'url': 'doctor'},
            # {'id': '27', 'url': 'accounting'},
            # {'id': '28', 'url': 'store'},
            # {'id': '29', 'url': 'notary'},
            # {'id': '30', 'url': 'jeweller'},
            # {'id': '31', 'url': 'tailor'},
            # {'id': '32', 'url': 'meat'},
            # {'id': '33', 'url': 'library'},
            # {'id': '34', 'url': 'architect'}
        ]
        capitales_du_monde = [
            {'id': '183', 'nom': 'akmeneje'},  # Akmenė
            {'id': '184', 'nom': 'alytuje'},  # Alytus
            {'id': '185', 'nom': 'anyksciuose'},  # Anykščiai
            {'id': '186', 'nom': 'birstone'},  # Birštonas
            {'id': '187', 'nom': 'birzuose'},  # Biržai
            {'id': '188', 'nom': 'druskininkuose'},  # Druskininkai
            {'id': '189', 'nom': 'elektrenuose'},  # Elektrėnai
            {'id': '190', 'nom': 'ignalinoje'},  # Ignalina
            {'id': '191', 'nom': 'jonavoje'},  # Jonava
            {'id': '192', 'nom': 'joniskyje'},  # Joniškis
            {'id': '193', 'nom': 'jurbarke'},  # Jurbarkas
            {'id': '194', 'nom': 'kaisiadoryse'},  # Kaišiadorys
            {'id': '195', 'nom': 'kalvarijoje'},  # Kalvarija
            {'id': '196', 'nom': 'kaune'},  # Kaunas
            {'id': '197', 'nom': 'kazlu-rudoje'},  # Kazlų Rūda
            {'id': '198', 'nom': 'kedainiuose'},  # Kėdainiai
            {'id': '199', 'nom': 'kelmeje'},  # Kelmė
            {'id': '200', 'nom': 'klaipedoje'},  # Klaipėda
            {'id': '201', 'nom': 'kretingoje'},  # Kretinga
            {'id': '202', 'nom': 'kupiskyje'},  # Kupiškis
            {'id': '203', 'nom': 'lazdijuose'},  # Lazdijai
            {'id': '204', 'nom': 'marijampoleje'},  # Marijampolė
            {'id': '205', 'nom': 'mazeikiuose'},  # Mažeikiai
            {'id': '206', 'nom': 'moletuose'},  # Molėtai
            {'id': '207', 'nom': 'neringoje'},  # Neringa
            {'id': '208', 'nom': 'pagegiuose'},  # Pagėgiai
            {'id': '209', 'nom': 'pakruojyje'},  # Pakruojis
            {'id': '210', 'nom': 'palangoje'},  # Palanga
            {'id': '211', 'nom': 'panevezyje'},  # Panevėžys
            {'id': '212', 'nom': 'pasvalyje'},  # Pasvalys
            {'id': '213', 'nom': 'plungeje'},  # Plungė
            {'id': '214', 'nom': 'prienuose'},  # Prienai
            {'id': '215', 'nom': 'radviliskyje'},  # Radviliškis
            {'id': '216', 'nom': 'raseiniuose'},  # Raseiniai
            {'id': '217', 'nom': 'rietave'},  # Rietavas
            {'id': '218', 'nom': 'rokiskyje'},  # Rokiškis
            {'id': '219', 'nom': 'sakiuose'},  # Šakiai
            {'id': '220', 'nom': 'salcininkuose'},  # Šalčininkai
            {'id': '221', 'nom': 'siauliuose'},  # Šiauliai
            {'id': '222', 'nom': 'silaleje'},  # Šilalė
            {'id': '223', 'nom': 'siluteje'},  # Šilutė
            {'id': '224', 'nom': 'sirvintose'},  # Širvintos
            {'id': '225', 'nom': 'skuode'},  # Skuodas
            {'id': '226', 'nom': 'svencionyse'},  # Švenčionys
            {'id': '227', 'nom': 'taurageje'},  # Tauragė
            {'id': '228', 'nom': 'telsiuose'},  # Telšiai
            {'id': '229', 'nom': 'trakuose'},  # Trakai
            {'id': '230', 'nom': 'ukmergeje'},  # Ukmergė
            {'id': '231', 'nom': 'utenoje'},  # Utena
            {'id': '232', 'nom': 'varenoje'},  # Varėna
            {'id': '233', 'nom': 'vilkaviskyje'},  # Vilkaviškis
            {'id': '234', 'nom': 'vilniuje'},  # Vilnius
            {'id': '235', 'nom': 'visagine'},  # Visaginas
            {'id': '236', 'nom': 'zarasuose'}  # Zarasai
        ]
        try:
            for capitale in capitales_du_monde:
                for activite in activites:
                    try:
                        activity = activite.get("url")
                        city = capitale.get("nom")
                        url_search = "https://www.visalietuva.lt/en/search/" + activity + "/" + city
                        html_search = requests.get(url_search)
                        soup_search = BeautifulSoup(html_search.content, 'html.parser')
                        # 20 results are listed per page, hence the division below.
                        number_of_pages = 0
                        if soup_search.find('div', {'class': 'search_count f_left'}) is not None:
                            number_of_pages_with_coma = int(soup_search
                                                            .find('div', {'class': 'search_count f_left'})
                                                            .find('span').text
                                                            ) / 20
                            if int(str(number_of_pages_with_coma).split(".")[1][:1]) < 5:
                                number_of_pages += round(number_of_pages_with_coma) + 1
                                print('number_of_pages : ' + str(number_of_pages))
                            elif int(str(number_of_pages_with_coma).split(".")[1][:1]) >= 5:
                                number_of_pages += round(number_of_pages_with_coma)
                                print('number_of_pages : ' + str(number_of_pages))
                        i_1 = 0
                        if soup_search.find('div', {'class': 'company_list'}) is not None:
                            print(url_search)
                            for result_item in soup_search \
                                    .find('div', {'class': 'company_list'}) \
                                    .find_all('div', {'class': 'item'}):
                                i_1 += 1
                                url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')
                                time.sleep(2)
                                # Request the content of a page from the url
                                html_result = requests.get(url_result)
                                # Parse the content of html_doc
                                soup_result = BeautifulSoup(html_result.content, 'html.parser')
                                if soup_result.find('a', {'itemprop': 'email'}) is not None:
                                    email = "info@" + \
                                            soup_result.find('a', {'itemprop': 'email'}).text.split("@")[1]
                                    try:
                                        connection = pymysql.connect(
                                            host='localhost',
                                            port=3306,
                                            user='',
                                            password='',
                                            db='contacts_professionnels',
                                            charset='utf8mb4',
                                            cursorclass=pymysql.cursors.DictCursor
                                        )
                                        with connection.cursor() as cursor:
                                            try:
                                                sql = "INSERT INTO `emails` (" \
                                                      "`id_activite`, " \
                                                      "`id_capitale_du_monde`, " \
                                                      "`email`) VALUE (%s, %s, %s)"
                                                cursor.execute(sql, (
                                                    activite.get('id'),
                                                    capitale.get('id'),
                                                    email))
                                                connection.commit()
                                                print(str(i_1) + " The record is stored : " + email)
                                                connection.close()
                                            except:
                                                print(str(i_1) + " The record already exists : " + email)
                                                connection.close()
                                    except Exception as e:
                                        print(str(i_1) + " An error with the email : " + email + " " + str(e))
                                else:
                                    print(str(i_1) + ' no email business')
                        else:
                            print('sorry there is nothing')
                        if number_of_pages > 1:
                            for i in range(2, number_of_pages + 1):
                                url_of_one_page_of_results = url_search + "/" + str(i)
                                print(url_of_one_page_of_results)
                                time.sleep(2)
                                html_of_one_page_of_results = requests.get(url_of_one_page_of_results)
                                soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content,
                                                                            'html.parser')
                                if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None:
                                    for result_item in soup_of_one_page_of_results \
                                            .find('div', {'class': 'company_list'}) \
                                            .find_all('div', {'class': 'item'}):
                                        i_1 += 1
                                        url_result = result_item.find('a', {'class': 'company-item-title'}).get('href')
                                        time.sleep(2)
                                        # Request the content of a page from the url
                                        html_result = requests.get(url_result)
                                        # Parse the content of html_doc
                                        soup_result = BeautifulSoup(html_result.content, 'html.parser')
                                        if soup_result.find('a', {'itemprop': 'email'}) is not None:
                                            email = "info@" + \
                                                    soup_result.find('a', {'itemprop': 'email'}).text.split("@")[1]
                                            try:
                                                connection = pymysql.connect(
                                                    host='localhost',
                                                    port=3306,
                                                    user='',
                                                    password='',
                                                    db='contacts_professionnels',
                                                    charset='utf8mb4',
                                                    cursorclass=pymysql.cursors.DictCursor
                                                )
                                                with connection.cursor() as cursor:
                                                    try:
                                                        sql = "INSERT INTO `emails` (" \
                                                              "`id_activite`, " \
                                                              "`id_capitale_du_monde`, " \
                                                              "`email`) VALUE (%s, %s, %s)"
                                                        cursor.execute(sql, (
                                                            activite.get('id'),
                                                            capitale.get('id'),
                                                            email))
                                                        connection.commit()
                                                        print(str(i_1) + " The record is stored : " + email)
                                                        connection.close()
                                                    except:
                                                        print(str(i_1) + " The record already exists : " + email)
                                                        connection.close()
                                            except Exception as e:
                                                print(str(i_1) + " An error with the email : " + email + " " + str(e))
                                        else:
                                            print(str(i_1) + ' no email business')
                                else:
                                    print('sorry there is nothing')
                    except Exception as e:
                        print("There is an error connection at url : " + str(e))
        finally:
            print('done')
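

# Illustrative sketch, not part of the original tests: the storage step above
# opens a fresh pymysql connection for every e-mail and closes it in both the
# success and the duplicate branch. Assuming the same schema (database
# contacts_professionnels, table emails with columns id_activite,
# id_capitale_du_monde and email), the bookkeeping can be centralised in one
# helper; the function name store_email is ours.
def store_email(id_activite, id_capitale_du_monde, email):
    connection = pymysql.connect(
        host='localhost',
        port=3306,
        user='',
        password='',
        db='contacts_professionnels',
        charset='utf8mb4',
        cursorclass=pymysql.cursors.DictCursor
    )
    try:
        with connection.cursor() as cursor:
            sql = ("INSERT INTO `emails` "
                   "(`id_activite`, `id_capitale_du_monde`, `email`) "
                   "VALUES (%s, %s, %s)")
            cursor.execute(sql, (id_activite, id_capitale_du_monde, email))
        connection.commit()
    finally:
        # Always release the connection, even when the INSERT raises on a
        # duplicate key.
        connection.close()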
BeautifulSoup(html_search.content, 'html.parser') number_of_pages = 0 if soup_search.find('div', {'class': 'search_count f_left'}) is not", "cursor.execute(sql, ( activite.get('id'), capitale.get('id'), email)) connection.commit() print(str(i_1) + \" The record is stored", "'16', 'url': 'insurance'}, #{'id': '17', 'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id': '19',", "'html.parser') if soup_result.find('a', {'itemprop': 'email'}) is not None: email = \"info@\" + soup_result.find('a',", "'217', 'nom': 'rietave'},#Rietavas {'id': '218', 'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id': '220',", "\"`id_activite`, \" \\ \"`id_capitale_du_monde`, \" \\ \"`email`) VALUE (%s, %s, %s)\" cursor.execute(sql, (", "'dentist'}, #{'id': '26', 'url': 'doctor'}, #{'id': '27', 'url': 'accounting'}, #{'id': '28', 'url': 'store'},", "'nom': 'alytuje'},#Alytus {'id': '185', 'nom': 'anyksciuose'},#Anykščiai {'id': '186', 'nom': 'birstone'},#Birštonas {'id': '187', 'nom':", "#{'id': '11', 'url': 'building'}, #{'id': '12', 'url': 'hairdresser'}, #{'id': '13', 'url': 'florist'}, #{'id':", "if number_of_pages > 1: for i in range(2, number_of_pages+1): url_of_one_page_of_results = url_search +", ".find_all('div', {'class': 'item'}): i_1 += 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') time.sleep(2) #", "'moletuose'},#Molėtai {'id': '207', 'nom': 'neringoje'},#Neringa {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis", "] capitales_du_monde = [ {'id': '183', 'nom': 'akmeneje'},#Akmenė {'id': '184', 'nom': 'alytuje'},#Alytus {'id':", "'html.parser') if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None: for result_item in soup_of_one_page_of_results \\", "print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results = requests.get(url_of_one_page_of_results) soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content, 'html.parser') if soup_of_one_page_of_results.find('div', {'class': 'company_list'})", "not None: number_of_pages_with_coma = int(soup_search .find('div', {'class': 'search_count f_left'}) .find('span').text ) / 20", "+ str(i) print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results = requests.get(url_of_one_page_of_results) soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content, 'html.parser') if soup_of_one_page_of_results.find('div',", "'34', 'url': 'architect'} ] capitales_du_monde = [ {'id': '183', 'nom': 'akmeneje'},#Akmenė {'id': '184',", "i in range(2, number_of_pages + 1): url_of_one_page_of_results = url_search + \"/\" + str(i)", "connection at url : \" + str(e)) finally: print('done') if __name__ == '__main__':", "= \"info@\" + \\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost', port=3306,", "'205', 'nom': 'mazeikiuose'},#Mažeikiai {'id': '206', 'nom': 'moletuose'},#Molėtai {'id': '207', 'nom': 'neringoje'},#Neringa {'id': '208',", "'plungeje'},#Plungė {'id': '214', 'nom': 'prienuose'},#Prienai {'id': '215', 'nom': 'radviliskyje'},#Radviliškis {'id': '216', 'nom': 'raseiniuose'},#Raseiniai", "soup_search.find('div', {'class': 'search_count f_left'}) is not None: number_of_pages_with_coma = int(soup_search .find('div', {'class': 'search_count", "'email'}).text.split(\"@\")[1] print('email : ' + email) else: print('no email business') def 
test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity", "as cursor: try: sql = \"INSERT INTO `emails` (\" \\ \"`id_activite`, \" \\", "None: email = \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost',", "'30', 'url': 'jeweller'}, #{'id': '31', 'url': 'tailor'}, #{'id': '32', 'url': 'meat'}, #{'id': '33',", "range(2, number_of_pages+1): url_of_one_page_of_results = url_search + \"/\" + str(i) print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results =", "pymysql.connect( host='localhost', port=3306, user='', password='', db='contacts_professionnels', charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor ) with connection.cursor() as cursor:", "the url html = requests.get(url) # Parse the content of html_doc soup =", "'nom': 'druskininkuose'},#Druskininkai {'id': '189', 'nom': 'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom':", "{'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost', port=3306, user='', password='', db='contacts_professionnels', charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor", "= \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1) + ' email : ' +", "/ 20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages += round(number_of_pages_with_coma) + 1 print('number_of_pages :", "#{'id': '12', 'url': 'hairdresser'}, #{'id': '13', 'url': 'florist'}, #{'id': '14', 'url': 'locksmith'}, #{'id':", "'ukmergeje'},#Ukmergė {'id': '231', 'nom': 'utenoje'},#Utena {'id': '232', 'nom': 'varenoje'},#Varėna {'id': '233', 'nom': 'vilkaviskyje'},#Vilkaviškis", "{'class': 'search_count f_left'}) is not None: number_of_pages_with_coma = int(soup_search .find('div', {'class': 'search_count f_left'})", "# Request the content of a page from the url html = requests.get(url)", "is nothing') if number_of_pages > 1: for i in range(2, number_of_pages + 1):", "'nom': 'akmeneje'},#Akmenė {'id': '184', 'nom': 'alytuje'},#Alytus {'id': '185', 'nom': 'anyksciuose'},#Anykščiai {'id': '186', 'nom':", "email + \" \" + str(e)) else: print(str(i_1) + ' no email business')", "'207', 'nom': 'neringoje'},#Neringa {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis {'id': '210',", "#{'id': '31', 'url': 'tailor'}, #{'id': '32', 'url': 'meat'}, #{'id': '33', 'url': 'library'}, #{'id':", "email) connection.close() except: print(str(i_1) + \" The record already exists : \" +", "'url': 'moving'}, #{'id': '19', 'url': 'electricity'}, #{'id': '20', 'url': 'plumbing'}, #{'id': '21', 'url':", "'200', 'nom': 'klaipedoje'},#Klaipėda {'id': '201', 'nom': 'kretingoje'},#Kretinga {'id': '202', 'nom': 'kupiskyje'},#Kupiškis {'id': '203',", "'236', 'nom': 'zarasuose'}#Zarasai ] try: for capitale in capitales_du_monde: for activite in activites:", "print(str(i_1) + \" An error with the email : \" + email +", "at url : \" + str(e)) finally: print('done') if __name__ == '__main__': unittest.main()", "{'id': '233', 'nom': 'vilkaviskyje'},#Vilkaviškis {'id': '234', 'nom': 'vilniuje'},#Vilnius {'id': '235', 'nom': 'visagine'},#Visaginas {'id':", "soup.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print('email : ' + email) else: print('no email business') def", "'32', 'url': 'meat'}, #{'id': '33', 'url': 'library'}, #{'id': '34', 'url': 'architect'} ] capitales_du_monde", "print('sorry there is 
nothing') except Exception as e: print(\"There is an error connection", "{'id': '211', 'nom': 'panevezyje'},#Panevėžys {'id': '212', 'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom': 'plungeje'},#Plungė {'id':", "\" The record is stored : \" + email) connection.close() except: print(str(i_1) +", "'nom': 'telsiuose'},#Telšiai {'id': '229', 'nom': 'trakuose'},#Trakai {'id': '230', 'nom': 'ukmergeje'},#Ukmergė {'id': '231', 'nom':", "Request the content of a page from the url html_result = requests.get(url_result) #", "an error connection at url : \" + str(e)) finally: print('done') if __name__", "round(number_of_pages_with_coma) print('number_of_pages : ' + str(number_of_pages)) i_1 = 0 if soup_search.find('div', {'class': 'company_list'})", "activites: try: activity = activite.get(\"url\") city = capitale.get(\"nom\") url_search = \"https://www.visalietuva.lt/en/search/\" + activity", "+= round(number_of_pages_with_coma) + 1 print('number_of_pages : ' + str(number_of_pages)) elif int(str(number_of_pages_with_coma).split(\".\")[1][:1]) >= 5:", "else: print('sorry there is nothing') except Exception as e: print(\"There is an error", "'visagine'},#Visaginas {'id': '236', 'nom': 'zarasuose'}#Zarasai ] try: for capitale in capitales_du_monde: for activite", "else: print('sorry there is nothing') if number_of_pages > 1: for i in range(2,", "is not None: email = \"info@\" + soup.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print('email : '", "'229', 'nom': 'trakuose'},#Trakai {'id': '230', 'nom': 'ukmergeje'},#Ukmergė {'id': '231', 'nom': 'utenoje'},#Utena {'id': '232',", "is an error connection at url : \" + str(e)) finally: print('done') if", "'jonavoje'},#Jonava {'id': '192', 'nom': 'joniskyje'},#Joniškis {'id': '193', 'nom': 'jurbarke'},#Jurbarkas {'id': '194', 'nom': 'kaisiadoryse'},#Kaišiadorys", "'siauliuose'},#Šiauliai {'id': '222', 'nom': 'silaleje'},#Šilalė {'id': '223', 'nom': 'siluteje'},#Šilutė {'id': '224', 'nom': 'sirvintose'},#Širvintos", "'email'}) is not None: email = \"info@\" + \\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try:", "the content of a page from the url html = requests.get(url) # Parse", "{'id': '230', 'nom': 'ukmergeje'},#Ukmergė {'id': '231', 'nom': 'utenoje'},#Utena {'id': '232', 'nom': 'varenoje'},#Varėna {'id':", "\" + str(e)) else: print(str(i_1) + ' no email business') else: print('sorry there", "'nom': 'vilniuje'},#Vilnius {'id': '235', 'nom': 'visagine'},#Visaginas {'id': '236', 'nom': 'zarasuose'}#Zarasai ] try: for", "'html.parser') number_of_pages = 0 if soup_search.find('div', {'class': 'search_count f_left'}) is not None: number_of_pages_with_coma", "print('number_of_pages : ' + str(number_of_pages)) i_1 = 0 if soup_search.find('div', {'class': 'company_list'}) is", "there is nothing') if number_of_pages > 1: for i in range(2, number_of_pages+1): url_of_one_page_of_results", "'196', 'nom': 'kaune'},#Kaunas {'id': '197', 'nom': 'kazlu-rudoje'},#Kazlų Rūda {'id': '198', 'nom': 'kedainiuose'},#Kėdainiai {'id':", "\\ \"`id_capitale_du_monde`, \" \\ \"`email`) VALUE (%s, %s, %s)\" cursor.execute(sql, ( activite.get('id'), capitale.get('id'),", "#{'id': '4', 'url': 'software'}, #{'id': '5', 'url': 'hotel'}, #{'id': '6', 'url': 'landlord'}, #{'id':", "#{'id': '9', 'url': 'financial'}, #{'id': '10', 'url': 'restaurant'}, #{'id': '11', 'url': 'building'}, #{'id':", "'siluteje'},#Šilutė {'id': '224', 'nom': 'sirvintose'},#Širvintos {'id': '225', 'nom': 'skuode'},#Skuodas {'id': '226', 'nom': 'svencionyse'},#Švenčionys", "#{'id': 
'7', 'url': 'cleaning'}, #{'id': '8', 'url': 'association'}, #{'id': '9', 'url': 'financial'}, #{'id':", "email business') else: print('sorry there is nothing') if number_of_pages > 1: for i", "'url': 'library'}, #{'id': '34', 'url': 'architect'} ] capitales_du_monde = [ {'id': '183', 'nom':", "'prienuose'},#Prienai {'id': '215', 'nom': 'radviliskyje'},#Radviliškis {'id': '216', 'nom': 'raseiniuose'},#Raseiniai {'id': '217', 'nom': 'rietave'},#Rietavas", "'raseiniuose'},#Raseiniai {'id': '217', 'nom': 'rietave'},#Rietavas {'id': '218', 'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai", ") with connection.cursor() as cursor: try: sql = \"INSERT INTO `emails` (\" \\", "{'id': '221', 'nom': 'siauliuose'},#Šiauliai {'id': '222', 'nom': 'silaleje'},#Šilalė {'id': '223', 'nom': 'siluteje'},#Šilutė {'id':", "email) connection.close() except Exception as e: print(str(i_1) + \" An error with the", "except Exception as e: print(str(i_1) + \" An error with the email :", "'url': 'garage'}, #{'id': '25', 'url': 'dentist'}, #{'id': '26', 'url': 'doctor'}, #{'id': '27', 'url':", "\" \" + str(e)) else: print(str(i_1) + ' no email business') else: print('sorry", "soup_search \\ .find('div', {'class': 'company_list'}) \\ .find_all('div', {'class': 'item'}): i_1 += 1 url_result", "1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') # Request the content of a page", "'232', 'nom': 'varenoje'},#Varėna {'id': '233', 'nom': 'vilkaviskyje'},#Vilkaviškis {'id': '234', 'nom': 'vilniuje'},#Vilnius {'id': '235',", "{'itemprop': 'email'}) is not None: email = \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1)", "+ \" \" + str(e)) else: print(str(i_1) + ' no email business') else:", "= \"info@\" + soup.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print('email : ' + email) else: print('no", "capitales_du_monde: for activite in activites: try: activity = activite.get(\"url\") city = capitale.get(\"nom\") url_search", "i_1 += 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') # Request the content of", "'nom': 'sakiuose'},#Šakiai {'id': '220', 'nom': 'salcininkuose'},#Šalčininkai {'id': '221', 'nom': 'siauliuose'},#Šiauliai {'id': '222', 'nom':", "int(soup_search .find('div', {'class': 'search_count f_left'}) .find('span').text ) / 20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5:", "the email : \" + email + \" \" + str(e)) else: print(str(i_1)", "'21', 'url': 'security'}, #{'id': '22', 'url': 'lawyer'}, #{'id': '23', 'url': 'bank'}, #{'id': '24',", "= url_search + \"/\" + str(i) print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results = requests.get(url_of_one_page_of_results) soup_of_one_page_of_results =", "soup_result.find('a', {'itemprop': 'email'}) is not None: email = \"info@\" + \\ soup_result.find('a', {'itemprop':", "0 if soup_search.find('div', {'class': 'company_list'}) is not None: print(url_search) for result_item in soup_search", "{'id': '184', 'nom': 'alytuje'},#Alytus {'id': '185', 'nom': 'anyksciuose'},#Anykščiai {'id': '186', 'nom': 'birstone'},#Birštonas {'id':", "else: print(str(i_1) + ' no email business') else: print('sorry there is nothing') if", "'neringoje'},#Neringa {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis {'id': '210', 'nom': 'palangoje'},#Palanga", "None: for result_item in soup_of_one_page_of_results \\ .find('div', {'class': 'company_list'}) \\ .find_all('div', {'class': 
'item'}):", ": ' + email) else: print(str(i_1) + ' no email business') else: print('sorry", "'item'}): i_1 += 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') time.sleep(2) # Request the", "'company_list'})\\ .find_all('div', {'class': 'item'}): i_1 += 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') #", "the content of html_doc soup = BeautifulSoup(html.content, 'html.parser') if soup.find('a', {'itemprop': 'email'}) is", "'birzuose'},#Biržai {'id': '188', 'nom': 'druskininkuose'},#Druskininkai {'id': '189', 'nom': 'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina", "time.sleep(2) # Request the content of a page from the url html_result =", "{'id': '191', 'nom': 'jonavoje'},#Jonava {'id': '192', 'nom': 'joniskyje'},#Joniškis {'id': '193', 'nom': 'jurbarke'},#Jurbarkas {'id':", "' + str(number_of_pages)) i_1 = 0 if soup_search.find('div', {'class': 'company_list'}) is not None:", "'6', 'url': 'landlord'}, #{'id': '7', 'url': 'cleaning'}, #{'id': '8', 'url': 'association'}, #{'id': '9',", "'search_count f_left'}) .find('span').text ) / 20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages += round(number_of_pages_with_coma)", "soup_of_one_page_of_results\\ .find('div', {'class': 'company_list'})\\ .find_all('div', {'class': 'item'}): i_1 += 1 url_result = result_item.find('a',", "'url': 'tailor'}, #{'id': '32', 'url': 'meat'}, #{'id': '33', 'url': 'library'}, #{'id': '34', 'url':", "`emails` (\" \\ \"`id_activite`, \" \\ \"`id_capitale_du_monde`, \" \\ \"`email`) VALUE (%s, %s,", "'jurbarke'},#Jurbarkas {'id': '194', 'nom': 'kaisiadoryse'},#Kaišiadorys {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom': 'kaune'},#Kaunas", "url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city html_search = requests.get(url_search) soup_search", "'20', 'url': 'plumbing'}, #{'id': '21', 'url': 'security'}, #{'id': '22', 'url': 'lawyer'}, #{'id': '23',", "'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom': 'plungeje'},#Plungė {'id': '214', 'nom': 'prienuose'},#Prienai {'id': '215', 'nom':", "activite in activites: try: activity = activite.get(\"url\") city = capitale.get(\"nom\") url_search = \"https://www.visalietuva.lt/en/search/\"", "soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost', port=3306, user='', password='', db='contacts_professionnels', charset='utf8mb4',", "'url': 'restaurant'}, #{'id': '11', 'url': 'building'}, #{'id': '12', 'url': 'hairdresser'}, #{'id': '13', 'url':", "url html_result = requests.get(url_result) # Parse the content of html_doc soup_result = BeautifulSoup(html_result.content,", "#{'id': '19', 'url': 'electricity'}, #{'id': '20', 'url': 'plumbing'}, #{'id': '21', 'url': 'security'}, #{'id':", "\"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city html_search = requests.get(url_search) soup_search = BeautifulSoup(html_search.content,", "html_doc soup_result = BeautifulSoup(html_result.content, 'html.parser') if soup_result.find('a', {'itemprop': 'email'}) is not None: email", "is stored : \" + email) connection.close() except: print(str(i_1) + \" The record", "'url': 'building'}, #{'id': '12', 'url': 'hairdresser'}, #{'id': '13', 'url': 'florist'}, #{'id': '14', 'url':", "#{'id': '33', 'url': 'library'}, #{'id': '34', 'url': 'architect'} ] capitales_du_monde = [ {'id':", "is not None: number_of_pages_with_coma = int(soup_search .find('div', 
{'class': 'search_count f_left'}) .find('span').text ) /", "not None: email = \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1) + ' email", "{'id': '212', 'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom': 'plungeje'},#Plungė {'id': '214', 'nom': 'prienuose'},#Prienai {'id':", "is not None: email = \"info@\" + \\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection", "'radviliskyje'},#Radviliškis {'id': '216', 'nom': 'raseiniuose'},#Raseiniai {'id': '217', 'nom': 'rietave'},#Rietavas {'id': '218', 'nom': 'rokiskyje'},#Rokiškis", "'real+estate'}, #{'id': '3', 'url': 'recruitment'}, #{'id': '4', 'url': 'software'}, #{'id': '5', 'url': 'hotel'},", ">= 5: number_of_pages += round(number_of_pages_with_coma) print('number_of_pages : ' + str(number_of_pages)) i_1 = 0", "result_item.find('a', {'class': 'company-item-title'}).get('href') time.sleep(2) # Request the content of a page from the", "'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai {'id': '206', 'nom': 'moletuose'},#Molėtai", "nothing') except Exception as e: print(\"There is an error connection at url :", "with connection.cursor() as cursor: try: sql = \"INSERT INTO `emails` (\" \\ \"`id_activite`,", "'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id': '220', 'nom': 'salcininkuose'},#Šalčininkai {'id': '221', 'nom':", "'nom': 'neringoje'},#Neringa {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis {'id': '210', 'nom':", "the url html_result = requests.get(url_result) # Parse the content of html_doc soup_result =", "'url': 'real+estate'}, #{'id': '3', 'url': 'recruitment'}, #{'id': '4', 'url': 'software'}, #{'id': '5', 'url':", "'184', 'nom': 'alytuje'},#Alytus {'id': '185', 'nom': 'anyksciuose'},#Anykščiai {'id': '186', 'nom': 'birstone'},#Birštonas {'id': '187',", "city = \"vilniuje\" url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city html_search", "connection.close() except Exception as e: print(str(i_1) + \" An error with the email", "{'id': '193', 'nom': 'jurbarke'},#Jurbarkas {'id': '194', 'nom': 'kaisiadoryse'},#Kaišiadorys {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id':", "BeautifulSoup(html_search.content, 'html.parser') number_of_pages = 0 if soup_search.find('div', {'class': 'search_count f_left'}) is not None:", "'nom': 'kazlu-rudoje'},#Kazlų Rūda {'id': '198', 'nom': 'kedainiuose'},#Kėdainiai {'id': '199', 'nom': 'kelmeje'},#Kelmė {'id': '200',", "{'id': '224', 'nom': 'sirvintose'},#Širvintos {'id': '225', 'nom': 'skuode'},#Skuodas {'id': '226', 'nom': 'svencionyse'},#Švenčionys {'id':", "'211', 'nom': 'panevezyje'},#Panevėžys {'id': '212', 'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom': 'plungeje'},#Plungė {'id': '214',", "password='', db='contacts_professionnels', charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor ) with connection.cursor() as cursor: try: sql = \"INSERT", "'nom': 'jurbarke'},#Jurbarkas {'id': '194', 'nom': 'kaisiadoryse'},#Kaišiadorys {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom':", "#{'id': '2', 'url': 'real+estate'}, #{'id': '3', 'url': 'recruitment'}, #{'id': '4', 'url': 'software'}, #{'id':", "email = \"info@\" + soup.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print('email : ' + email) else:", "a page from the url html_result = requests.get(url_result) # Parse the content of", "'nom': 'plungeje'},#Plungė {'id': '214', 
'nom': 'prienuose'},#Prienai {'id': '215', 'nom': 'radviliskyje'},#Radviliškis {'id': '216', 'nom':", "'230', 'nom': 'ukmergeje'},#Ukmergė {'id': '231', 'nom': 'utenoje'},#Utena {'id': '232', 'nom': 'varenoje'},#Varėna {'id': '233',", "+ 1): url_of_one_page_of_results = url_search + \"/\" + str(i) print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results =", "'company_list'}) is not None: print(url_search) for result_item in soup_search \\ .find('div', {'class': 'company_list'})", "the content of html_doc soup_result = BeautifulSoup(html_result.content, 'html.parser') if soup_result.find('a', {'itemprop': 'email'}) is", "i_1 += 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') time.sleep(2) # Request the content", "email business') else: print('sorry there is nothing') except Exception as e: print(\"There is", "'taurageje'},#Tauragė {'id': '228', 'nom': 'telsiuose'},#Telšiai {'id': '229', 'nom': 'trakuose'},#Trakai {'id': '230', 'nom': 'ukmergeje'},#Ukmergė", "charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor ) with connection.cursor() as cursor: try: sql = \"INSERT INTO `emails`", "'202', 'nom': 'kupiskyje'},#Kupiškis {'id': '203', 'nom': 'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id': '205',", "def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity = \"hotel\" city = \"vilniuje\" url_search = \"https://www.visalietuva.lt/en/search/\" + activity", "BeautifulSoup(html_result.content, 'html.parser') if soup_result.find('a', {'itemprop': 'email'}) is not None: email = \"info@\" +", "= \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city html_search = requests.get(url_search) soup_search =", "{'id': '217', 'nom': 'rietave'},#Rietavas {'id': '218', 'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id':", "\" An error with the email : \" + email + \" \"", "\"hotel\" city = \"vilniuje\" url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city", "'palangoje'},#Palanga {'id': '211', 'nom': 'panevezyje'},#Panevėžys {'id': '212', 'nom': 'pasvalyje'},#Pasvalys {'id': '213', 'nom': 'plungeje'},#Plungė", "None: for result_item in soup_of_one_page_of_results\\ .find('div', {'class': 'company_list'})\\ .find_all('div', {'class': 'item'}): i_1 +=", "{'id': '235', 'nom': 'visagine'},#Visaginas {'id': '236', 'nom': 'zarasuose'}#Zarasai ] try: for capitale in", "capitale in capitales_du_monde: for activite in activites: try: activity = activite.get(\"url\") city =", "\\ .find('div', {'class': 'company_list'}) \\ .find_all('div', {'class': 'item'}): i_1 += 1 url_result =", "'notary'}, #{'id': '30', 'url': 'jeweller'}, #{'id': '31', 'url': 'tailor'}, #{'id': '32', 'url': 'meat'},", "= [ {'id': '183', 'nom': 'akmeneje'},#Akmenė {'id': '184', 'nom': 'alytuje'},#Alytus {'id': '185', 'nom':", "'4', 'url': 'software'}, #{'id': '5', 'url': 'hotel'}, #{'id': '6', 'url': 'landlord'}, #{'id': '7',", "'190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id': '192', 'nom': 'joniskyje'},#Joniškis {'id': '193',", "'kretingoje'},#Kretinga {'id': '202', 'nom': 'kupiskyje'},#Kupiškis {'id': '203', 'nom': 'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė", "'18', 'url': 'moving'}, #{'id': '19', 'url': 'electricity'}, #{'id': '20', 'url': 'plumbing'}, #{'id': '21',", "'varenoje'},#Varėna {'id': '233', 'nom': 'vilkaviskyje'},#Vilkaviškis {'id': '234', 'nom': 'vilniuje'},#Vilnius {'id': 
'235', 'nom': 'visagine'},#Visaginas", "#{'id': '25', 'url': 'dentist'}, #{'id': '26', 'url': 'doctor'}, #{'id': '27', 'url': 'accounting'}, #{'id':", "+ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1) + ' email : ' + email) else:", "'url': 'recruitment'}, #{'id': '4', 'url': 'software'}, #{'id': '5', 'url': 'hotel'}, #{'id': '6', 'url':", "'33', 'url': 'library'}, #{'id': '34', 'url': 'architect'} ] capitales_du_monde = [ {'id': '183',", "'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id': '19', 'url': 'electricity'}, #{'id': '20', 'url':", "email = \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost', port=3306,", "'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis {'id': '210', 'nom': 'palangoje'},#Palanga {'id': '211', 'nom': 'panevezyje'},#Panevėžys", "is not None: email = \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1) + '", "{'id': '186', 'nom': 'birstone'},#Birštonas {'id': '187', 'nom': 'birzuose'},#Biržai {'id': '188', 'nom': 'druskininkuose'},#Druskininkai {'id':", "'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id': '192', 'nom': 'joniskyje'},#Joniškis {'id': '193', 'nom': 'jurbarke'},#Jurbarkas", ".find('div', {'class': 'company_list'}) \\ .find_all('div', {'class': 'item'}): i_1 += 1 url_result = result_item.find('a',", "'url': 'plumbing'}, #{'id': '21', 'url': 'security'}, #{'id': '22', 'url': 'lawyer'}, #{'id': '23', 'url':", "{'id': '228', 'nom': 'telsiuose'},#Telšiai {'id': '229', 'nom': 'trakuose'},#Trakai {'id': '230', 'nom': 'ukmergeje'},#Ukmergė {'id':", "#{'id': '30', 'url': 'jeweller'}, #{'id': '31', 'url': 'tailor'}, #{'id': '32', 'url': 'meat'}, #{'id':", "{'id': '227', 'nom': 'taurageje'},#Tauragė {'id': '228', 'nom': 'telsiuose'},#Telšiai {'id': '229', 'nom': 'trakuose'},#Trakai {'id':", "with the email : \" + email + \" \" + str(e)) else:", "for capitale in capitales_du_monde: for activite in activites: try: activity = activite.get(\"url\") city", "'birstone'},#Birštonas {'id': '187', 'nom': 'birzuose'},#Biržai {'id': '188', 'nom': 'druskininkuose'},#Druskininkai {'id': '189', 'nom': 'elektrenuose'},#Elektrėnai", "'locksmith'}, #{'id': '15', 'url': 'bakery'}, #{'id': '16', 'url': 'insurance'}, #{'id': '17', 'url': 'pharmacy'},", "Exception as e: print(str(i_1) + \" An error with the email : \"", "'store'}, #{'id': '29', 'url': 'notary'}, #{'id': '30', 'url': 'jeweller'}, #{'id': '31', 'url': 'tailor'},", "in range(2, number_of_pages+1): url_of_one_page_of_results = url_search + \"/\" + str(i) print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results", "] try: for capitale in capitales_du_monde: for activite in activites: try: activity =", "url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') time.sleep(2) # Request the content of a page", "'17', 'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id': '19', 'url': 'electricity'}, #{'id': '20',", "url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') # Request the content of a page from", "'url': 'labour'} #{'id': '2', 'url': 'real+estate'}, #{'id': '3', 'url': 'recruitment'}, #{'id': '4', 'url':", "'url': 'insurance'}, #{'id': '17', 'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id': '19', 'url':", "'nom': 'rietave'},#Rietavas {'id': '218', 'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id': '220', 'nom':", 
"{'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom': 'kaune'},#Kaunas {'id': '197', 'nom': 'kazlu-rudoje'},#Kazlų Rūda", "{'class': 'company_list'}) is not None: print(url_search) for result_item in soup_search \\ .find('div', {'class':", "'garage'}, #{'id': '25', 'url': 'dentist'}, #{'id': '26', 'url': 'doctor'}, #{'id': '27', 'url': 'accounting'},", "'url': 'notary'}, #{'id': '30', 'url': 'jeweller'}, #{'id': '31', 'url': 'tailor'}, #{'id': '32', 'url':", "+ soup.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print('email : ' + email) else: print('no email business')", "\" + email) connection.close() except: print(str(i_1) + \" The record already exists :", "An error with the email : \" + email + \" \" +", "int(soup_search .find('div', {'class': 'search_count f_left'}) .find('span').text )/20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages +=", "'29', 'url': 'notary'}, #{'id': '30', 'url': 'jeweller'}, #{'id': '31', 'url': 'tailor'}, #{'id': '32',", "'nom': 'taurageje'},#Tauragė {'id': '228', 'nom': 'telsiuose'},#Telšiai {'id': '229', 'nom': 'trakuose'},#Trakai {'id': '230', 'nom':", "+ ' no email business') else: print('sorry there is nothing') def test_extract_emails_from_all_page_of_results_for_all_activities_and_capitals(self): activites", "number_of_pages + 1): url_of_one_page_of_results = url_search + \"/\" + str(i) print(url_of_one_page_of_results) time.sleep(2) html_of_one_page_of_results", "> 1: for i in range(2, number_of_pages+1): url_of_one_page_of_results = url_search + \"/\" +", "#{'id': '3', 'url': 'recruitment'}, #{'id': '4', 'url': 'software'}, #{'id': '5', 'url': 'hotel'}, #{'id':", "'kupiskyje'},#Kupiškis {'id': '203', 'nom': 'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai", "not None: email = \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect(", "there is nothing') def test_extract_emails_from_all_page_of_results_for_all_activities_and_capitals(self): activites = [ # {'id': '1', 'url': 'labour'}", "'url': 'lawyer'}, #{'id': '23', 'url': 'bank'}, #{'id': '24', 'url': 'garage'}, #{'id': '25', 'url':", "\"INSERT INTO `emails` (\" \\ \"`id_activite`, \" \\ \"`id_capitale_du_monde`, \" \\ \"`email`) VALUE", "int(str(number_of_pages_with_coma).split(\".\")[1][:1]) >= 5: number_of_pages += round(number_of_pages_with_coma) print('number_of_pages : ' + str(number_of_pages)) i_1 =", "1 print('number_of_pages : ' + str(number_of_pages)) elif int(str(number_of_pages_with_coma).split(\".\")[1][:1]) >= 5: number_of_pages += round(number_of_pages_with_coma)", "{'id': '187', 'nom': 'birzuose'},#Biržai {'id': '188', 'nom': 'druskininkuose'},#Druskininkai {'id': '189', 'nom': 'elektrenuose'},#Elektrėnai {'id':", "if soup_search.find('div', {'class': 'search_count f_left'}) is not None: number_of_pages_with_coma = int(soup_search .find('div', {'class':", "#{'id': '28', 'url': 'store'}, #{'id': '29', 'url': 'notary'}, #{'id': '30', 'url': 'jeweller'}, #{'id':", "'alytuje'},#Alytus {'id': '185', 'nom': 'anyksciuose'},#Anykščiai {'id': '186', 'nom': 'birstone'},#Birštonas {'id': '187', 'nom': 'birzuose'},#Biržai", "'186', 'nom': 'birstone'},#Birštonas {'id': '187', 'nom': 'birzuose'},#Biržai {'id': '188', 'nom': 'druskininkuose'},#Druskininkai {'id': '189',", "'url': 'financial'}, #{'id': '10', 'url': 'restaurant'}, #{'id': '11', 'url': 'building'}, #{'id': '12', 'url':", "'rietave'},#Rietavas {'id': 
'218', 'nom': 'rokiskyje'},#Rokiškis {'id': '219', 'nom': 'sakiuose'},#Šakiai {'id': '220', 'nom': 'salcininkuose'},#Šalčininkai", "in capitales_du_monde: for activite in activites: try: activity = activite.get(\"url\") city = capitale.get(\"nom\")", "capitale.get('id'), email)) connection.commit() print(str(i_1) + \" The record is stored : \" +", "'235', 'nom': 'visagine'},#Visaginas {'id': '236', 'nom': 'zarasuose'}#Zarasai ] try: for capitale in capitales_du_monde:", "= requests.get(url_of_one_page_of_results) soup_of_one_page_of_results = BeautifulSoup(html_of_one_page_of_results.content, 'html.parser') if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None:", "test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity = \"hotel\" city = \"vilniuje\" url_search = \"https://www.visalietuva.lt/en/search/\" + activity +", "= requests.get(url) # Parse the content of html_doc soup = BeautifulSoup(html.content, 'html.parser') if", "of html_doc soup = BeautifulSoup(html.content, 'html.parser') if soup.find('a', {'itemprop': 'email'}) is not None:", "'insurance'}, #{'id': '17', 'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id': '19', 'url': 'electricity'},", "'security'}, #{'id': '22', 'url': 'lawyer'}, #{'id': '23', 'url': 'bank'}, #{'id': '24', 'url': 'garage'},", "'sakiuose'},#Šakiai {'id': '220', 'nom': 'salcininkuose'},#Šalčininkai {'id': '221', 'nom': 'siauliuose'},#Šiauliai {'id': '222', 'nom': 'silaleje'},#Šilalė", "'telsiuose'},#Telšiai {'id': '229', 'nom': 'trakuose'},#Trakai {'id': '230', 'nom': 'ukmergeje'},#Ukmergė {'id': '231', 'nom': 'utenoje'},#Utena", "'sirvintose'},#Širvintos {'id': '225', 'nom': 'skuode'},#Skuodas {'id': '226', 'nom': 'svencionyse'},#Švenčionys {'id': '227', 'nom': 'taurageje'},#Tauragė", "email) else: print('no email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity = \"hotel\" city = \"vilniuje\"", "= [ # {'id': '1', 'url': 'labour'} #{'id': '2', 'url': 'real+estate'}, #{'id': '3',", "'architect'} ] capitales_du_monde = [ {'id': '183', 'nom': 'akmeneje'},#Akmenė {'id': '184', 'nom': 'alytuje'},#Alytus", "{'id': '202', 'nom': 'kupiskyje'},#Kupiškis {'id': '203', 'nom': 'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id':", "email)) connection.commit() print(str(i_1) + \" The record is stored : \" + email)", "not None: email = \"info@\" + \\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection =", "connection = pymysql.connect( host='localhost', port=3306, user='', password='', db='contacts_professionnels', charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor ) with connection.cursor()", "BeautifulSoup(html_of_one_page_of_results.content, 'html.parser') if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None: for result_item in soup_of_one_page_of_results\\", "'kelmeje'},#Kelmė {'id': '200', 'nom': 'klaipedoje'},#Klaipėda {'id': '201', 'nom': 'kretingoje'},#Kretinga {'id': '202', 'nom': 'kupiskyje'},#Kupiškis", "'mazeikiuose'},#Mažeikiai {'id': '206', 'nom': 'moletuose'},#Molėtai {'id': '207', 'nom': 'neringoje'},#Neringa {'id': '208', 'nom': 'pagegiuose'},#Pagėgiai", "number_of_pages_with_coma = int(soup_search .find('div', {'class': 'search_count f_left'}) .find('span').text )/20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5:", "'233', 'nom': 'vilkaviskyje'},#Vilkaviškis {'id': '234', 'nom': 'vilniuje'},#Vilnius {'id': '235', 'nom': 
'visagine'},#Visaginas {'id': '236',", "'nom': 'lazdijuose'},#Lazdijai {'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai {'id': '206', 'nom':", ": ' + email) else: print('no email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity = \"hotel\"", "{'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id': '192', 'nom': 'joniskyje'},#Joniškis {'id':", "'nom': 'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id': '192', 'nom':", "{'id': '231', 'nom': 'utenoje'},#Utena {'id': '232', 'nom': 'varenoje'},#Varėna {'id': '233', 'nom': 'vilkaviskyje'},#Vilkaviškis {'id':", "soup.find('a', {'itemprop': 'email'}) is not None: email = \"info@\" + soup.find('a', {'itemprop': 'email'}).text.split(\"@\")[1]", "'197', 'nom': 'kazlu-rudoje'},#Kazlų Rūda {'id': '198', 'nom': 'kedainiuose'},#Kėdainiai {'id': '199', 'nom': 'kelmeje'},#Kelmė {'id':", "'nom': 'zarasuose'}#Zarasai ] try: for capitale in capitales_du_monde: for activite in activites: try:", "{'id': '189', 'nom': 'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava {'id':", "+ \"/\" + city html_search = requests.get(url_search) soup_search = BeautifulSoup(html_search.content, 'html.parser') number_of_pages =", "'lawyer'}, #{'id': '23', 'url': 'bank'}, #{'id': '24', 'url': 'garage'}, #{'id': '25', 'url': 'dentist'},", "capitale.get(\"nom\") url_search = \"https://www.visalietuva.lt/en/search/\" + activity + \"/\" + city html_search = requests.get(url_search)", "'215', 'nom': 'radviliskyje'},#Radviliškis {'id': '216', 'nom': 'raseiniuose'},#Raseiniai {'id': '217', 'nom': 'rietave'},#Rietavas {'id': '218',", "'druskininkuose'},#Druskininkai {'id': '189', 'nom': 'elektrenuose'},#Elektrėnai {'id': '190', 'nom': 'ignalinoje'},#Ignalina {'id': '191', 'nom': 'jonavoje'},#Jonava", "+ 1 print('number_of_pages : ' + str(number_of_pages)) elif int(str(number_of_pages_with_coma).split(\".\")[1][:1]) >= 5: number_of_pages +=", "#{'id': '21', 'url': 'security'}, #{'id': '22', 'url': 'lawyer'}, #{'id': '23', 'url': 'bank'}, #{'id':", "'191', 'nom': 'jonavoje'},#Jonava {'id': '192', 'nom': 'joniskyje'},#Joniškis {'id': '193', 'nom': 'jurbarke'},#Jurbarkas {'id': '194',", "# Parse the content of html_doc soup = BeautifulSoup(html.content, 'html.parser') if soup.find('a', {'itemprop':", "'199', 'nom': 'kelmeje'},#Kelmė {'id': '200', 'nom': 'klaipedoje'},#Klaipėda {'id': '201', 'nom': 'kretingoje'},#Kretinga {'id': '202',", "'meat'}, #{'id': '33', 'url': 'library'}, #{'id': '34', 'url': 'architect'} ] capitales_du_monde = [", "'193', 'nom': 'jurbarke'},#Jurbarkas {'id': '194', 'nom': 'kaisiadoryse'},#Kaišiadorys {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196',", "'194', 'nom': 'kaisiadoryse'},#Kaišiadorys {'id': '195', 'nom': 'kalvarijoje'},#Kalvarija {'id': '196', 'nom': 'kaune'},#Kaunas {'id': '197',", "'electricity'}, #{'id': '20', 'url': 'plumbing'}, #{'id': '21', 'url': 'security'}, #{'id': '22', 'url': 'lawyer'},", "'28', 'url': 'store'}, #{'id': '29', 'url': 'notary'}, #{'id': '30', 'url': 'jeweller'}, #{'id': '31',", "error connection at url : \" + str(e)) finally: print('done') if __name__ ==", "'nom': 'pagegiuose'},#Pagėgiai {'id': '209', 'nom': 'pakruojyje'},#Pakruojis {'id': '210', 'nom': 'palangoje'},#Palanga {'id': '211', 'nom':", "\"info@\" + soup.find('a', {'itemprop': 
'email'}).text.split(\"@\")[1] print('email : ' + email) else: print('no email", "soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1) + ' email : ' + email) else: print(str(i_1)", "' + email) else: print(str(i_1) + ' no email business') else: print('sorry there", "for i in range(2, number_of_pages + 1): url_of_one_page_of_results = url_search + \"/\" +", "'nom': 'varenoje'},#Varėna {'id': '233', 'nom': 'vilkaviskyje'},#Vilkaviškis {'id': '234', 'nom': 'vilniuje'},#Vilnius {'id': '235', 'nom':", "'url': 'hotel'}, #{'id': '6', 'url': 'landlord'}, #{'id': '7', 'url': 'cleaning'}, #{'id': '8', 'url':", "'zarasuose'}#Zarasai ] try: for capitale in capitales_du_monde: for activite in activites: try: activity", "print('email : ' + email) else: print('no email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity =", "test_extract_emails_from_all_page_of_results_for_all_activities_and_capitals(self): activites = [ # {'id': '1', 'url': 'labour'} #{'id': '2', 'url': 'real+estate'},", "+= 1 url_result = result_item.find('a', {'class': 'company-item-title'}).get('href') time.sleep(2) # Request the content of", ")/20 if int(str(number_of_pages_with_coma).split(\".\")[1][:1]) < 5: number_of_pages += round(number_of_pages_with_coma) + 1 print('number_of_pages : '", "f_left'}) is not None: number_of_pages_with_coma = int(soup_search .find('div', {'class': 'search_count f_left'}) .find('span').text )/20", "#{'id': '16', 'url': 'insurance'}, #{'id': '17', 'url': 'pharmacy'}, #{'id': '18', 'url': 'moving'}, #{'id':", "The record is stored : \" + email) connection.close() except: print(str(i_1) + \"", "'html.parser') if soup.find('a', {'itemprop': 'email'}) is not None: email = \"info@\" + soup.find('a',", "sql = \"INSERT INTO `emails` (\" \\ \"`id_activite`, \" \\ \"`id_capitale_du_monde`, \" \\", "+ \\ soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] try: connection = pymysql.connect( host='localhost', port=3306, user='', password='',", "for result_item in soup_of_one_page_of_results\\ .find('div', {'class': 'company_list'})\\ .find_all('div', {'class': 'item'}): i_1 += 1", "def test_extract_one_email(self): url = \"https://www.visalietuva.lt/en/company/astorija-hotel-uab\" # Request the content of a page from", "if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None: for result_item in soup_of_one_page_of_results \\ .find('div',", "BeautifulSoup(html_of_one_page_of_results.content, 'html.parser') if soup_of_one_page_of_results.find('div', {'class': 'company_list'}) is not None: for result_item in soup_of_one_page_of_results", "except: print(str(i_1) + \" The record already exists : \" + email) connection.close()", "print('no email business') def test_extract_emails_from_all_page_of_results_for_one_activity_and_capital(self): activity = \"hotel\" city = \"vilniuje\" url_search =", "'bank'}, #{'id': '24', 'url': 'garage'}, #{'id': '25', 'url': 'dentist'}, #{'id': '26', 'url': 'doctor'},", "'building'}, #{'id': '12', 'url': 'hairdresser'}, #{'id': '13', 'url': 'florist'}, #{'id': '14', 'url': 'locksmith'},", "{'id': '204', 'nom': 'marijampoleje'},#Marijampolė {'id': '205', 'nom': 'mazeikiuose'},#Mažeikiai {'id': '206', 'nom': 'moletuose'},#Molėtai {'id':", "#{'id': '15', 'url': 'bakery'}, #{'id': '16', 'url': 'insurance'}, #{'id': '17', 'url': 'pharmacy'}, #{'id':", "INTO `emails` (\" \\ \"`id_activite`, \" \\ \"`id_capitale_du_monde`, \" \\ \"`email`) VALUE (%s,", "'software'}, #{'id': 
'5', 'url': 'hotel'}, #{'id': '6', 'url': 'landlord'}, #{'id': '7', 'url': 'cleaning'},", "'email'}) is not None: email = \"info@\" + soup_result.find('a', {'itemprop': 'email'}).text.split(\"@\")[1] print(str(i_1) +" ]
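For readability, here is a minimal sketch of the scraping flow that the n-gram fragments in the row above appear to encode (a visalietuva.lt category search, pagination over the result list, a request per company page, and an "info@" address built from the email domain). This is a hedged reconstruction, not the original file: the function name extract_emails, the math.ceil page count, and the print-based output are illustrative stand-ins; the fragments suggest the original unittest rounds the page count manually and inserts each address into a MySQL `emails` table via pymysql.

# Reconstruction sketch only; selectors ('search_count f_left', 'company_list',
# 'item', 'company-item-title', itemprop='email') are taken from the fragments,
# everything else (function name, ceil-based paging, printing) is illustrative.
import math
import time

import requests
from bs4 import BeautifulSoup


def extract_emails(activity, city):
    emails = []
    url_search = "https://www.visalietuva.lt/en/search/" + activity + "/" + city
    soup_search = BeautifulSoup(requests.get(url_search).content, "html.parser")

    # The site lists 20 results per page; the total count sits in a
    # 'search_count f_left' span, from which the page count is derived.
    number_of_pages = 1
    count_div = soup_search.find("div", {"class": "search_count f_left"})
    if count_div is not None:
        total = int(count_div.find("span").text)
        number_of_pages = max(1, math.ceil(total / 20))

    for page in range(1, number_of_pages + 1):
        # Page 1 is the plain search URL; later pages append "/<page>".
        page_url = url_search if page == 1 else url_search + "/" + str(page)
        soup_page = BeautifulSoup(requests.get(page_url).content, "html.parser")
        company_list = soup_page.find("div", {"class": "company_list"})
        if company_list is None:
            continue
        for item in company_list.find_all("div", {"class": "item"}):
            company_url = item.find("a", {"class": "company-item-title"}).get("href")
            time.sleep(2)  # polite delay between requests, as in the fragments
            soup_company = BeautifulSoup(requests.get(company_url).content, "html.parser")
            email_tag = soup_company.find("a", {"itemprop": "email"})
            if email_tag is not None:
                # The original keeps only the domain and prefixes "info@".
                emails.append("info@" + email_tag.text.split("@")[1])
    return emails


if __name__ == "__main__":
    for email in extract_emails("hotel", "vilniuje"):
        print(email)  # the original test stores these rows in a MySQL `emails` table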
[ "= DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of", "'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is ' '567bc23fd1ac. Please run `dagster", "re import pytest from dagster import file_relative_path from dagster.core.errors import DagsterInstanceMigrationRequired from dagster.core.instance", "do a check for the existence of an # old runs.db every time", "be migrated (SqliteEventLogStorage for run ' '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head", "date and must be migrated (SqliteEventLogStorage for run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at", "is out of date and must be migrated (SqliteEventLogStorage for run ' '89296095-892d-4a15-aa0d-9018d1580945).", "Please run `dagster instance migrate`.' ), ): for run in runs: instance.all_logs(run.run_id) def", "), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert", "instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out", "reasonable choice since # the runs.db has moved and otherwise we would have", "the upgrade # method. assert len(runs) == 0 run_ids = instance._event_storage.get_all_run_ids() assert run_ids", "instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs = instance.get_runs() assert len(runs) == 1 run_ids =", "for the existence of an # old runs.db every time we accessed the", "= DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs = instance.get_runs() assert len(runs) == 1 run_ids = instance._event_storage.get_all_run_ids()", "= instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert os.path.exists(file_relative_path(__file__,", "for run in runs: instance.all_logs(run.run_id) def test_0_6_6_sqlite_exc(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir):", "DagsterInstanceMigrationRequired. This is a reasonable choice since # the runs.db has moved and", "runs and events from an old instance def test_0_6_4(): test_dir = file_relative_path(__file__, 'snapshot_0_6_4')", "migrated (SqliteEventLogStorage for run ' '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is", "runs = instance.get_runs() with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of date and", "must be migrated (SqliteEventLogStorage for run ' '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None,", "' '567bc23fd1ac. Please run `dagster instance migrate`.' ), ): for run in runs:", "DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() # Note that this is a deliberate choice --", "time we accessed the runs. Instead, we'll do this only in the upgrade", "in the upgrade # method. 
assert len(runs) == 0 run_ids = instance._event_storage.get_all_run_ids() assert", "instance def test_0_6_4(): test_dir = file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs", "is at revision None, head is ' '567bc23fd1ac. Please run `dagster instance migrate`.'", "Instead, we'll do this only in the upgrade # method. assert len(runs) ==", "run `dagster instance migrate`.' ), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')", "runs = instance.get_runs() assert len(runs) == 1 run_ids = instance._event_storage.get_all_run_ids() assert run_ids ==", "run_ids = instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert", "of date and must be migrated (SqliteEventLogStorage for run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is", "len(runs) == 1 run_ids = instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') assert not", "are simply invisible, and their # presence won't raise DagsterInstanceMigrationRequired. This is a", "dagster.utils.test import restore_directory # test that we can load runs and events from", "with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) instance.upgrade() runs = instance.get_runs() assert len(runs) == 1", "# test that we can load runs and events from an old instance", "' '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is ' '567bc23fd1ac. Please run", "test_dir = file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() with", "def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))", "run `dagster instance migrate`.' ), ): for run in runs: instance.all_logs(run.run_id) def test_0_6_6_sqlite_exc():", "with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance", "file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() # Note that", "-- old runs are simply invisible, and their # presence won't raise DagsterInstanceMigrationRequired.", "['89296095-892d-4a15-aa0d-9018d1580945'] with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of date and must be", "= file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs = instance.get_runs() with pytest.raises(", "be migrated (SqliteEventLogStorage for run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head", "method. 
assert len(runs) == 0 run_ids = instance._event_storage.get_all_run_ids() assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945'] with", "import os import re import pytest from dagster import file_relative_path from dagster.core.errors import", "events from an old instance def test_0_6_4(): test_dir = file_relative_path(__file__, 'snapshot_0_6_4') with restore_directory(test_dir):", "instance.get_runs() with pytest.raises( DagsterInstanceMigrationRequired, match=re.escape( 'Instance is out of date and must be", "revision None, head is ' '567bc23fd1ac. Please run `dagster instance migrate`.' ), ):", "runs: instance.all_logs(run.run_id) def test_0_6_6_sqlite_exc(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))", "we'll do this only in the upgrade # method. assert len(runs) == 0", "'89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is ' '567bc23fd1ac. Please run `dagster", "This is a reasonable choice since # the runs.db has moved and otherwise", "is ' '567bc23fd1ac. Please run `dagster instance migrate`.' ), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate():", "Please run `dagster instance migrate`.' ), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__,", "migrate`.' ), ): instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945') def test_0_6_6_sqlite_migrate(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))", "import pytest from dagster import file_relative_path from dagster.core.errors import DagsterInstanceMigrationRequired from dagster.core.instance import", "must be migrated (SqliteEventLogStorage for run ' 'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None,", "the runs. Instead, we'll do this only in the upgrade # method. assert", "'snapshot_0_6_6/sqlite') assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db')) assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')) with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))", "invisible, and their # presence won't raise DagsterInstanceMigrationRequired. This is a reasonable choice", "instance.all_logs(run.run_id) def test_0_6_6_sqlite_exc(): test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite') with restore_directory(test_dir): instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir)) runs", "a reasonable choice since # the runs.db has moved and otherwise we would", "import re import pytest from dagster import file_relative_path from dagster.core.errors import DagsterInstanceMigrationRequired from", "old runs are simply invisible, and their # presence won't raise DagsterInstanceMigrationRequired. 
# pylint: disable=protected-access
import os
import re

import pytest

from dagster import file_relative_path
from dagster.core.errors import DagsterInstanceMigrationRequired
from dagster.core.instance import DagsterInstance, InstanceRef
from dagster.utils.test import restore_directory


# test that we can load runs and events from an old instance
def test_0_6_4():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_4')
    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
        runs = instance.get_runs()
        with pytest.raises(
            DagsterInstanceMigrationRequired,
            match=re.escape(
                'Instance is out of date and must be migrated (SqliteEventLogStorage for run '
                'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is '
                '567bc23fd1ac. Please run `dagster instance migrate`.'
            ),
        ):
            for run in runs:
                instance.all_logs(run.run_id)


def test_0_6_6_sqlite_exc():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')
    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
        runs = instance.get_runs()
        # Note that this is a deliberate choice -- old runs are simply invisible, and their
        # presence won't raise DagsterInstanceMigrationRequired. This is a reasonable choice since
        # the runs.db has moved and otherwise we would have to do a check for the existence of an
        # old runs.db every time we accessed the runs. Instead, we'll do this only in the upgrade
        # method.
        assert len(runs) == 0

        run_ids = instance._event_storage.get_all_run_ids()
        assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']

        with pytest.raises(
            DagsterInstanceMigrationRequired,
            match=re.escape(
                'Instance is out of date and must be migrated (SqliteEventLogStorage for run '
                '89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is '
                '567bc23fd1ac. Please run `dagster instance migrate`.'
            ),
        ):
            instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945')


def test_0_6_6_sqlite_migrate():
    test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')
    assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))
    assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))

    with restore_directory(test_dir):
        instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
        instance.upgrade()

        runs = instance.get_runs()
        assert len(runs) == 1

        run_ids = instance._event_storage.get_all_run_ids()
        assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']

        instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945')

        assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))
        assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))
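These migration tests lean on restore_directory (imported above from dagster.utils.test) to snapshot the on-disk fixture directory before the test body runs and put it back afterwards, so an in-place instance.upgrade() never dirties the checked-in snapshot directories. As a rough illustration of that pattern only -- the helper below is a hypothetical stand-in written for this note, not Dagster's actual implementation -- a minimal version could look like:

import os
import shutil
import tempfile
from contextlib import contextmanager


@contextmanager
def restore_directory_sketch(src):
    # Copy the fixture directory aside before the test body runs.
    with tempfile.TemporaryDirectory() as backup_root:
        backup = os.path.join(backup_root, 'backup')
        shutil.copytree(src, backup)
        try:
            yield src
        finally:
            # Discard whatever the test wrote (e.g. a migrated runs.db) and
            # restore the original snapshot.
            shutil.rmtree(src, ignore_errors=True)
            shutil.copytree(backup, src)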
import matplotlib.pyplot as plt
import pandas as pd


def group_by_category(df):
    grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes')
    labels = ['Trespassing', 'Vehicle theft', 'General Theft',
              'Damage to Property', 'Robbery', 'Homicide']
    p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%')
    p.set_title('Crimes Percentage Grouped By Category')
    p.get_legend().remove()
    plt.savefig('../charts/category.png')


def group_by_time_of_day(df):
    grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes')
    p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%')
    p.set_title('Crimes Percentage Grouped By Time of Day')
    p.get_legend().remove()
    plt.savefig('../charts/time_of_day.png')


def group_by_day_of_the_week(df):
    grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes')
    labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
    p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%')
    p.set_title('Crimes Percentage Grouped By Day of The Week')
    p.get_legend().remove()
    plt.savefig('../charts/day_of_the_week.png')


def group_by_month(df):
    grouped = df.groupby(['MONTH']).size().to_frame('Size')
    grouped['Percentage'] = 100 * grouped['Size'] / len(df)
    grouped = grouped.drop(columns='Size')
    p = grouped.plot.bar()
    p.set_title('Crimes Percentage Grouped By Month')
    p.set_ylabel('Percentage of Crimes')
    p.set_xlabel('Month')
    p.get_legend().remove()
    plt.savefig('../charts/month.png')


def group_by_year(df):
    grouped = df.groupby(['YEAR']).size().to_frame('Crimes')
    p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%')
    p.set_title('Crimes Percentage Grouped By Year')
    p.get_legend().remove()
    plt.savefig('../charts/year.png')


def group_by_territory(df):
    grouped = df.groupby(['PDQ']).size().to_frame('Size')
    grouped['Percentage'] = 100 * grouped['Size'] / len(df)
    grouped = grouped.drop(columns='Size')
    grouped.index = grouped.index.astype(int)
    p = grouped.plot.bar()
    p.set_title('Crimes Percentage Grouped By Territory')
    p.set_ylabel('Percentage of Crimes')
    p.set_xlabel('Territory Number')
    p.get_legend().remove()
    plt.savefig('../charts/territory.png')


if __name__ == '__main__':
    df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv')
    group_by_territory(df)
    group_by_year(df)
    group_by_month(df)
    group_by_time_of_day(df)
    group_by_day_of_the_week(df)
    group_by_category(df)
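group_by_month and group_by_territory above both use the same idiom: count rows per group with groupby(...).size(), convert the counts to percentages of the whole frame, and plot the result as a bar chart. A self-contained sketch of that idiom on a toy DataFrame (the column name reuses MONTH from the script above, while the data values and the output filename are made-up illustrations):

import matplotlib.pyplot as plt
import pandas as pd

# Toy data standing in for the crimes CSV.
df = pd.DataFrame({'MONTH': [1, 1, 2, 3, 3, 3]})

# Count rows per month, then normalise the counts to percentages of the frame.
grouped = df.groupby(['MONTH']).size().to_frame('Size')
grouped['Percentage'] = 100 * grouped['Size'] / len(df)
grouped = grouped.drop(columns='Size')

ax = grouped.plot.bar()
ax.set_title('Share of rows per month')
ax.set_ylabel('Percentage')
ax.set_xlabel('Month')
ax.get_legend().remove()
plt.savefig('month_example.png')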
[ "\"rule-x\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "= { \"seqpav\": [[0, 1, 3]], \"av\": [[0, 1, 2]], \"sav\": [[0, 1,", "= [[0, 4, 5], [0], [1, 4, 5], [1], [2, 4, 5], [2],", "test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage = True committeesize = 4 profile =", "1, 2, 4], [0, 2, 3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\":", "committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences import Profile", "import rules_approval # all rules used? for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys())", "5], [2], [3, 4, 5], [3]] profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0, 1,", "[1, 2, 3, 4], [1, 2, 3, 5]], \"rule-x\": [[0, 1, 4, 5],", "# Unit tests import unittest def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval #", "[0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5]], \"phrag\":", "rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences import Profile from preferences import", "\"rule-x\" in rule: # Monroe and rule x only work with unit weights:", "import DichotomousPreferences num_cand = 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None)", "msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \"", "[0, 3, 4], [1, 2, 3], [1, 3, 4]], \"revseqcc\": [[0, 1, 3],", "unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def", "[3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1, 3]], \"av\": [[0,", "3]], \"phrag\": [[0, 1, 3]], \"optphrag\": [[0, 1, 3], [0, 2, 3], [1,", "5]], \"cc-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2,", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phrag\":", "third profile profile = Profile(6) committeesize = 4 preflist = [[0, 3, 4,", "2, 4, 5]], \"seqcc\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "4], [1, 2, 3], [1, 3, 4]], \"monroe-ilp\": [[0, 1, 3], [0, 2,", "Profile import rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]])", "[1, 3], [1, 4], [2, 4], [2, 5], [2, 5]]) committeesize = 3", "[1, 2, 3, 5]], \"slav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "5], [0, 3, 4, 5], [1, 2, 4, 5], [1, 3, 4, 5],", "True profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2 for rule", "import rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2],", "3, 4], [1, 2, 3], [1, 3, 4]], \"seqcc\": [[0, 1, 3], [0,", "3, 5], [1, 2, 4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0,", "2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0,", "committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue( output[0]", "rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences", "1, 2, 4]], \"av\": [[0, 1, 2, 4], [0, 2, 3, 4]], \"sav\":", "[1, 3], [4], [2], [1, 5, 3]] 
profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5)", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\":", "2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-noilp\": [[0,", "[0, 1, 4, 5], [0, 2, 3, 4], [0, 2, 3, 5], [0,", "1, 3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1,", "[0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0, 1,", "3, 5]], \"revseqcc\": [[0, 1, 2, 3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\":", "profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue(", "4]], \"monroe-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"monroe-noilp\": [[0,", "5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 2, 3,", "self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self):", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phrag\": [[0, 1, 4,", "\" failed with resolute=True\") def test_monroe_indivisible(self): from preferences import Profile import rules_approval self.longMessage", "def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval # all rules used? for rule", "[0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4,", "\"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences", "2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"seqcc\": [[0,", "3, 5], [1, 2, 3, 4], [1, 2, 3, 5]], \"slav-noilp\": [[0, 1,", "[1, 3, 4]], \"revseqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4],", "from preferences import Profile self.longMessage = True # and another profile profile =", "[1, 2, 3], [1, 3, 4]], \"seqcc\": [[0, 1, 3], [0, 2, 3],", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"optphrag\": [[0,", "3], [0, 1, 2, 4], [0, 1, 2, 5], [0, 2, 3, 4],", "run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage = True", "import Profile import rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [0], [0],", "failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile import rules_approval self.longMessage = True profile", "from preferences import DichotomousPreferences import rules_approval self.longMessage = True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0]))", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-noilp\": [[0, 1, 2,", "[2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval import compute_rule self.longMessage", "= True committeesize = 4 profile = Profile(6) preflist = [[0, 4, 5],", "test_mwrules_weightsconsidered(self): from preferences import Profile from preferences import DichotomousPreferences import rules_approval self.longMessage =", "# implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile from 
# Unit tests

import unittest


def run_test_instance(unittestinstance, profile, committeesize, tests):
    import rules_approval

    # all rules used?
    for rule in rules_approval.MWRULES:
        unittestinstance.assertTrue(rule in tests.keys())

    for rule in tests.keys():
        output = rules_approval.compute_rule(rule, profile, committeesize,
                                             resolute=False)
        unittestinstance.assertEqual(
            output, tests[rule],
            msg=rules_approval.MWRULES[rule] + " failed")

        output = rules_approval.compute_rule(
            rule, profile, committeesize, resolute=True)
        unittestinstance.assertEqual(
            len(output), 1,
            msg=rules_approval.MWRULES[rule] + " failed with resolute=True")
        unittestinstance.assertTrue(
            output[0] in tests[rule],
            msg=rules_approval.MWRULES[rule] + " failed with resolute=True")
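For reference, run_test_instance is driven with a profile, a committee size, and a dictionary mapping rule identifiers to the committees the rule is expected to return. A minimal usage sketch of the underlying call (not part of the test suite, and assuming the preferences and rules_approval modules of this repository are importable):

from preferences import Profile
import rules_approval

profile = Profile(4)
profile.add_preferences([[0], [1], [2], [3]])
# "seqpav" is one of the rule identifiers listed in rules_approval.MWRULES
committees = rules_approval.compute_rule("seqpav", profile, 2, resolute=False)
print(committees)  # with resolute=False, every tied committee of size 2 is returned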
class TestApprovalMultiwinner(unittest.TestCase):

    def test_createprofiles(self):
        from preferences import Profile
        from preferences import DichotomousPreferences
        num_cand = 7
        prof = Profile(num_cand)
        self.assertEqual(prof.add_preferences(
            DichotomousPreferences([0, 4, 5])), None)
        with self.assertRaises(Exception):
            prof.add_preferences(DichotomousPreferences([num_cand]))
        with self.assertRaises(Exception):
            prof.add_preferences(DichotomousPreferences([-1]))
        self.assertEqual(prof.add_preferences([0, 4, 5]), None)
        with self.assertRaises(Exception):
            prof.add_preferences([0, 4, 5, "1"])
        with self.assertRaises(Exception):
            prof.add_preferences(["1", 0, 4, 5])
        p1 = DichotomousPreferences([0, 4, 5])
        p2 = DichotomousPreferences([1, 2])
        self.assertEqual(prof.add_preferences([p1, p2]), None)
        self.assertTrue(prof.has_unit_weights())
        prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4))
        self.assertFalse(prof.has_unit_weights())
        self.assertEqual(prof.totalweight(), 6.4)

    def test_mwrules__toofewcandidates(self):
        from preferences import Profile
        import rules_approval
        profile = Profile(5)
        committeesize = 4
        preflist = [[0, 1, 2], [1], [1, 2], [0]]
        profile.add_preferences(preflist)
        for rule in rules_approval.MWRULES.keys():
            with self.assertRaises(Exception):
                rules_approval.compute_rule(rule, profile, committeesize)
            with self.assertRaises(Exception):
                rules_approval.compute_rule(rule, profile, committeesize,
                                            resolute=True)

    def test_mwrules_weightsconsidered(self):
        from preferences import Profile
        from preferences import DichotomousPreferences
        import rules_approval
        self.longMessage = True

        profile = Profile(3)
        profile.add_preferences(DichotomousPreferences([0]))
        profile.add_preferences(DichotomousPreferences([0]))
        profile.add_preferences(DichotomousPreferences([1], 5))
        profile.add_preferences(DichotomousPreferences([0]))
        committeesize = 1

        for rule in rules_approval.MWRULES.keys():
            if "monroe" in rule or "rule-x" in rule:
                # Monroe and rule x only work with unit weights:
                continue
            result = rules_approval.compute_rule(rule, profile, committeesize)
            self.assertTrue([1] in result,
                            msg=rule + " failed" + str(result))

    def test_mwrules_correct_simple(self):
        from preferences import Profile
        import rules_approval
        self.longMessage = True

        profile = Profile(4)
        profile.add_preferences([[0], [1], [2], [3]])
        committeesize = 2

        for rule in rules_approval.MWRULES.keys():
            if rule == "greedy-monroe":  # always returns one committee
                continue
            self.assertEqual(len(rules_approval.compute_rule(rule, profile,
                                                             committeesize)),
                             6, msg=rule + " failed")

        for rule in rules_approval.MWRULES.keys():
            self.assertEqual(len(rules_approval.compute_rule(
                rule, profile, committeesize, resolute=True)),
                1, msg=rule + " failed with resolute=True")

    def test_monroe_indivisible(self):
        from preferences import Profile
        import rules_approval
        self.longMessage = True

        profile = Profile(4)
        profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1], [3]])
        committeesize = 3

        for ilp in [True, False]:
            # max Monroe score is 6 (even for committee [0, 1, 3])
            self.assertEqual(
                rules_approval.compute_monroe(profile, committeesize,
                                              ilp=ilp, resolute=False),
                [[0, 1, 2], [0, 1, 3], [0, 2, 3]])

    # this test shows that tiebreaking is not (yet)
    # implemented for opt-Phragmen
    def test_optphrag_notiebreaking(self):
        from preferences import Profile
        from rules_approval import compute_rule
        self.longMessage = True

        profile = Profile(6)
        profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4],
                                 [2, 4], [2, 5], [2, 5]])
        committeesize = 3

        self.assertEqual(
            len(compute_rule("optphrag", profile, committeesize,
                             resolute=False)),
            12)

    def test_mwrules_correct_advanced_1(self):
        from preferences import Profile
        self.longMessage = True
        committeesize = 4

        profile = Profile(6)
        preflist = [[0, 4, 5], [0], [1, 4, 5], [1],
                    [2, 4, 5], [2], [3, 4, 5], [3]]
        profile.add_preferences(preflist)

        tests1 = {
            "seqpav": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                       [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "av": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                   [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "sav": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4],
                    [0, 1, 3, 5], [0, 1, 4, 5], [0, 2, 3, 4], [0, 2, 3, 5],
                    [0, 2, 4, 5], [0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
                    [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "pav-ilp": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                        [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "pav-noilp": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                          [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "revseqpav": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                          [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "minimaxav-noilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                                [0, 1, 3, 4], [0, 1, 3, 5], [0, 1, 4, 5],
                                [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                                [0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
                                [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "minimaxav-ilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                              [0, 1, 3, 4], [0, 1, 3, 5], [0, 1, 4, 5],
                              [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
                              [0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
                              [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "phrag": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                      [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "optphrag": [[0, 1, 2, 3]],
            "cc-ilp": [[0, 1, 2, 3]],
            "cc-noilp": [[0, 1, 2, 3]],
            "seqcc": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4], [0, 1, 3, 5],
                      [0, 2, 3, 4], [0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]],
            "revseqcc": [[0, 1, 2, 3]],
            "monroe-ilp": [[0, 1, 2, 3]],
            "monroe-noilp": [[0, 1, 2, 3]],
            "greedy-monroe": [[0, 2, 3, 4]],
            "slav-ilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                         [0, 1, 3, 4], [0, 1, 3, 5], [0, 2, 3, 4],
                         [0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]],
            "slav-noilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
                           [0, 1, 3, 4], [0, 1, 3, 5], [0, 2, 3, 4],
                           [0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]],
            "seqslav": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4],
                        [0, 1, 3, 5], [0, 2, 3, 4], [0, 2, 3, 5],
                        [1, 2, 3, 4], [1, 2, 3, 5]],
            "rule-x": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                       [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
            "phragmen-enestroem": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
                                   [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
        }

        run_test_instance(self, profile, committeesize, tests1)

        # and now with reversed preflist
        preflist.reverse()
        for p in preflist:
            p.reverse()
        profile = Profile(6)
        profile.add_preferences(preflist)

        run_test_instance(self, profile, committeesize, tests1)
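The "av" entry of tests1 can be spot-checked independently: under Approval Voting a committee's score is the sum of its members' approval counts, so the optimal committees are exactly those containing both 4 and 5. A small stand-alone check (an illustration, not part of the suite):

from itertools import combinations

prefs = [[0, 4, 5], [0], [1, 4, 5], [1], [2, 4, 5], [2], [3, 4, 5], [3]]
approvals = {c: sum(c in pref for pref in prefs) for c in range(6)}
scores = {comm: sum(approvals[c] for c in comm)
          for comm in combinations(range(6), 4)}
best = max(scores.values())
print(sorted(comm for comm, score in scores.items() if score == best))
# -> the six committees containing both 4 and 5, matching tests1["av"]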
\"minimaxav-ilp\": [[0, 1, 2, 3],", "\"cc-ilp\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1, 2, 3],", "work with unit weights: continue result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result,", "that tiebreaking is not (yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences", "import Profile self.longMessage = True committeesize = 4 profile = Profile(6) preflist =", "4], [0, 1, 3, 5], [0, 2, 3, 4], [0, 2, 3, 5],", "with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self):", "4]], \"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\": [[0,", "self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if __name__ ==", "True # and a third profile profile = Profile(6) preflist = [[0, 1],", "Profile from rules_approval import compute_rule self.longMessage = True profile = Profile(6) profile.add_preferences([[0], [0],", "[0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5]], \"revseqcc\":", "[[0, 1, 2, 4]], \"av\": [[0, 1, 2, 4], [0, 2, 3, 4]],", "4]], } run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from preferences import Profile from", "5], [1, 3, 4, 5], [2, 3, 4, 5]], } run_test_instance(self, profile, committeesize,", "[0, 1, 2, 3, 4], [0, 3, 4], [0, 2, 4], [0, 1]]", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"greedy-monroe\": [[0, 1, 2,", "for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile,", "[[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0,", "5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2,", "3, 5], [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4,", "2, 3]], \"monroe-noilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"greedy-monroe\":", "2, 3], [1, 2, 3]], \"monroe-noilp\": [[0, 1, 3], [0, 2, 3], [1,", "result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile import rules_approval", "[0, 1, 2, 5], [0, 1, 3, 4], [0, 1, 3, 5], [0,", "\"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from", "with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile from preferences import", "tests import unittest def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval # all rules", "[[0, 1, 3]], \"optphrag\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "preferences import Profile self.longMessage = True committeesize = 4 profile = Profile(6) preflist", "1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0, 1, 3],", "[1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"seqcc\":", "[[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0,", "[2, 4, 5], [2], [3, 4, 5], [3]] profile.add_preferences(preflist) tests1 = { \"seqpav\":", "3, 4]], \"seqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1,", "4, 5]], \"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0, 1, 2, 3], [0,", "[2], [3]]) committeesize = 2 for rule in 
rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\":", "2, 4, 5], [0, 3, 4, 5], [1, 2, 3, 4], [1, 2,", "def test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage = True # and a third", "2, 3, 5], [0, 2, 4, 5], [0, 3, 4, 5], [1, 2,", "3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0, 1, 3]],", "[[0, 1, 2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1, 2,", "and another profile profile = Profile(5) committeesize = 3 preflist = [[0, 1,", "[1, 2], [1], [3]]) committeesize = 3 for ilp in [True, False]: #", "+ \" failed with resolute=True\") def test_monroe_indivisible(self): from preferences import Profile import rules_approval", "4], [0, 2, 3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0, 1,", "[0, 3, 4], [1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1, 3],", "4, 5], [2, 3, 4, 5]], \"pav-noilp\": [[0, 1, 4, 5], [0, 2,", "preflist = [[0, 1], [1], [1, 3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist)", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"pav-ilp\": [[0, 1, 4,", "test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage = True # and another profile profile", "prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5,", "profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage = True", "profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage = True #", "5], [2, 3, 4, 5]], \"av\": [[0, 1, 4, 5], [0, 2, 4,", "5]], \"monroe-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2,", "committeesize, tests3) def test_monroescore(self): from preferences import Profile from score_functions import monroescore_flowbased, monroescore_matching", "4, 5], [0, 3, 4, 5], [1, 2, 4, 5], [1, 3, 4,", "for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \" failed", "2, 3], [1, 3, 4]], \"monroe-ilp\": [[0, 1, 3], [0, 2, 3], [1,", "[1], [1, 3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]),", "prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import Profile", "[1, 3, 4]], \"seqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4],", "5], [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]],", "3, 4, 5]], \"phrag\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "[[0, 1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile, committeesize, tests2) def", "committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage = True # and", "5], [1, 2, 3, 4], [1, 2, 3, 5]], \"revseqcc\": [[0, 1, 2,", "[1, 2, 3, 5], [1, 2, 4, 5]], \"seqcc\": [[0, 1, 2, 3],", "3], [0, 2, 3], [0, 3, 4], [1, 2, 3], [1, 3, 4]],", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-noilp\": [[0, 1, 2,", "[[0, 1, 2, 4]], \"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2,", "prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception): 
prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception):", "\"monroe-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"monroe-noilp\": [[0, 1,", "2, 3, 5]], \"slav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "4]], \"seqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1, 2,", "3]], \"minimaxav-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"phrag\": [[0,", "[2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3,", "1, 2, 4]], \"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\": [[0, 1, 2, 4]],", "DichotomousPreferences num_cand = 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with", "1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self):", "self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self):", "resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue( output[0] in", "resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase):", "[1, 2, 3, 5], [1, 2, 4, 5], [1, 3, 4, 5], [2,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3],", "3, 5], [1, 2, 3, 4], [1, 2, 3, 5]], \"seqslav\": [[0, 1,", "for rule in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule],", "[True, False]: # max Monroe score is 6 (even for committee [0, 1,", "rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1,", "[[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self, profile, committeesize,", "\"seqcc\": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4],", "run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval # all rules used? 
for rule in", "from preferences import Profile from preferences import DichotomousPreferences num_cand = 7 prof =", "3, 4, 5]], \"sav\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from preferences import Profile from score_functions import", "\"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 2, 3, 4],", "from preferences import Profile from rules_approval import compute_rule self.longMessage = True profile =", "1, 2, 4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0, 1, 2, 4]],", "[0, 2, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5], [1,", "4], [0, 2, 3, 5], [0, 2, 4, 5]], \"phrag\": [[0, 1, 2,", "self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4,", "committeesize) self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import", "[1, 2, 3]], \"monroe-noilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0, 1, 4, 5], [0, 2, 4,", "4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0,", "\"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0, 1, 2, 3], [0, 1, 2,", "\"seqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1, 2, 3],", "2, 3, 5], [1, 2, 4, 5]], \"cc-noilp\": [[0, 1, 2, 3], [0,", "\"slav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "4], [0, 3, 4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3 = {", "5]], \"phrag\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4,", "profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in", "2, 4], [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4,", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0, 1, 4,", "Profile(6) preflist = [[0, 4, 5], [0], [1, 4, 5], [1], [2, 4,", "\"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\":", "[0], [1, 2], [1, 2], [1], [3]]) committeesize = 3 for ilp in", "committeesize, resolute=True)), 1, msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self): from preferences", "[1, 2], [0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize)", "2], [0, 1, 2], [0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2", "class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile from preferences import DichotomousPreferences num_cand", "3, 4], [0, 1, 3, 5], [0, 2, 3, 4], [0, 2, 3,", "= [[0, 3, 4, 5], [1, 2], [0, 2, 5], [2], [0, 1,", "Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"greedy-monroe\": [[0, 1, 3]],", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], }", "[0, 2, 3, 5], [0, 2, 4, 5], [1, 2, 3, 4], [1,", "tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual(", "4]], \"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], } 
run_test_instance(self,", "= rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output", "[1, 2, 3], [1, 3, 4]], \"monroe-ilp\": [[0, 1, 3], [0, 2, 3],", "2, 4], [0, 2, 3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0,", "3, 4, 5]], } run_test_instance(self, profile, committeesize, tests1) # and now with reversed", "4 preflist = [[0, 1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist) for rule", "1, 3, 5], [0, 2, 3, 4], [0, 2, 3, 5], [1, 2,", "\"cc-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "(even for committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1,", "preflist = [[0, 3, 4, 5], [1, 2], [0, 2, 5], [2], [0,", "= True profile = Profile(6) profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4],", "[2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]),", "[4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1,", "if \"monroe\" in rule or \"rule-x\" in rule: # Monroe and rule x", "prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0,", "resolute=True) def test_mwrules_weightsconsidered(self): from preferences import Profile from preferences import DichotomousPreferences import rules_approval", "2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\":", "\"revseqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1, 2, 3],", "from preferences import Profile import rules_approval profile = Profile(5) committeesize = 4 preflist", "5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule", "[1, 2], [1, 2], [1], [3]]) committeesize = 3 for ilp in [True,", "2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"greedy-monroe\": [[0,", "import Profile from preferences import DichotomousPreferences import rules_approval self.longMessage = True profile =", "3], [0, 2, 3], [1, 2, 3]], \"cc-ilp\": [[0, 1, 3], [0, 2,", "in [True, False]: # max Monroe score is 6 (even for committee [0,", "1, 3], [0, 2, 3]]) # this test shows that tiebreaking is not", "2, 3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"seqcc\": [[0,", "\" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile import rules_approval self.longMessage = True", "Profile import rules_approval profile = Profile(5) committeesize = 4 preflist = [[0, 1,", "3, 4]], \"slav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "[0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0,", "0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1,", "[3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1, 3]],", "def test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval import compute_rule self.longMessage = True", "= True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1", 
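Similarly, the Chamberlin-Courant entries of tests2 can be verified by brute force, since the rule maximises the number of voters who approve at least one committee member. A stand-alone illustration (not part of the suite):

from itertools import combinations

prefs = [[0, 1, 2]] * 5 + [[0, 1], [3, 4], [3, 4], [3]]
coverage = {comm: sum(any(c in pref for c in comm) for pref in prefs)
            for comm in combinations(range(5), 3)}
best = max(coverage.values())
print(sorted(comm for comm, n in coverage.items() if n == best))
# -> [(0, 1, 3), (0, 2, 3), (0, 3, 4), (1, 2, 3), (1, 3, 4)], all 9 voters covered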
"\"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2,", "\"av\": [[0, 1, 2]], \"sav\": [[0, 1, 3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\":", "5], [1, 2, 4, 5]], \"cc-noilp\": [[0, 1, 2, 3], [0, 1, 2,", "tests2) def test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage = True # and a", "# and now with reversed preflist preflist.reverse() for p in preflist: p.reverse() profile", "[0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]], \"rule-x\":", "[1, 2, 4, 5]], \"monroe-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "[[0, 1, 2, 3]], \"seqcc\": [[0, 1, 2, 4], [0, 1, 2, 5],", "4]], \"optphrag\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2,", "5]], \"av\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4,", "\"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0, 1, 3], [0, 2, 3], [1, 2,", "[[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5], [0,", "3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4)", "1, 3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0, 1,", "3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\": [[0, 1, 2, 3], [0, 1,", "[[0, 1, 3]], } run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences import", "False]: # max Monroe score is 6 (even for committee [0, 1, 3])", "2 for rule in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always returns one", "[1, 2, 3]], \"minimaxav-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "3, 4], [1, 2, 3], [1, 3, 4]], \"revseqcc\": [[0, 1, 3], [0,", "prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2])", "msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import", "[1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-noilp\":", "\"cc-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "2, 4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\":", "run_test_instance(self, profile, committeesize, tests1) # and now with reversed preflist preflist.reverse() for p", "5]], \"pav-ilp\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4,", "(yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval", "[0, 1, 2, 5], [0, 2, 3, 4], [0, 2, 3, 5], [0,", "= 4 preflist = [[0, 1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist) for", "5]], \"sav\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2,", "5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile,", "import rules_approval self.longMessage = True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0]))", "\"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self, profile,", "rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\")", "3]], \"seqcc\": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3,", "2, 3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\":", "Profile(6) preflist = [[0, 1], [1], [1, 3], [4], [2], 
[1, 5, 3]]", "4], [1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1, 3], [0, 2,", "# always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \"", "rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in rule: # Monroe", "3, 4, 5], [2, 3, 4, 5]], \"pav-noilp\": [[0, 1, 4, 5], [0,", "profile, committeesize) self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences", "committeesize = 4 preflist = [[0, 1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist)", "[2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self):", "[0, 2, 3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"revseqcc\":", "1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4], [0, 1,", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0, 1, 2,", "profile profile = Profile(5) committeesize = 3 preflist = [[0, 1, 2], [0,", "1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1],", "in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in rule: # Monroe and", "3]]) # this test shows that tiebreaking is not (yet) # implemented for", "from score_functions import monroescore_flowbased, monroescore_matching self.longMessage = True # and a third profile", "[0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\":", "[1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]),", "2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile,", "[1, 2, 3, 5], [1, 2, 4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]],", "4, 5], [2, 3, 4, 5]], \"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0,", "3, 4, 5]], \"av\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "3, 4], [0, 2, 3, 5], [0, 2, 4, 5], [0, 3, 4,", "[0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2], [0,", "5], [0, 2, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],", "\"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1,", "resolute=True\") def test_monroe_indivisible(self): from preferences import Profile import rules_approval self.longMessage = True profile", "test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval import compute_rule self.longMessage = True profile", "resolute=True)), 1, msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self): from preferences import", "[2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]),", "msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self): from preferences import Profile import", "3, 5], [0, 2, 3, 4], [0, 2, 3, 5], [1, 2, 3,", "2, 3, 4], [1, 2, 3, 5]], \"rule-x\": [[0, 1, 4, 5], [0,", "2, 4], [0, 1, 2, 5], [0, 1, 3, 4], [0, 1, 3,", "self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1, 2],", "from preferences import Profile self.longMessage = True committeesize = 4 profile = Profile(6)", "\"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "ilp in [True, False]: # max Monroe score is 6 (even for committee", "[[0, 1, 2, 4]], \"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\": [[0, 1, 2,", "3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"seqcc\": [[0, 1,", "4, 5]], \"optphrag\": [[0, 1, 
2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\":", "[[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5], [1,", "[1, 2, 3, 4], [1, 2, 3, 5]], \"revseqcc\": [[0, 1, 2, 3]],", "1, 2, 3], [0, 1, 2, 4], [0, 2, 3, 4], [0, 2,", "1, 2, 4]], \"optphrag\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \"", "3], [0, 2, 3]]) # this test shows that tiebreaking is not (yet)", "len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage =", "[1, 2, 3, 4], [1, 2, 3, 5]], \"slav-noilp\": [[0, 1, 2, 3],", "= 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in", "[2, 3, 4, 5]], \"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0, 1, 2,", "preferences import Profile from rules_approval import compute_rule self.longMessage = True profile = Profile(6)", "self.longMessage = True # and another profile profile = Profile(5) committeesize = 3", "committeesize, tests1) # and now with reversed preflist preflist.reverse() for p in preflist:", "[0, 2, 4, 5], [0, 3, 4, 5], [1, 2, 4, 5], [1,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"av\": [[0, 1, 4, 5],", "rule: # Monroe and rule x only work with unit weights: continue result", "5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if", "5]], \"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0,", "from rules_approval import compute_rule self.longMessage = True profile = Profile(6) profile.add_preferences([[0], [0], [1,", "1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5], [0, 2,", "2, 3, 5], [1, 2, 4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\":", "[1, 2, 3, 5]], \"revseqcc\": [[0, 1, 2, 3]], \"monroe-ilp\": [[0, 1, 2,", "5], [2, 3, 4, 5]], \"sav\": [[0, 1, 2, 3], [0, 1, 2,", "True committeesize = 4 profile = Profile(6) preflist = [[0, 4, 5], [0],", "2, 5], [2], [0, 1, 2, 3, 4], [0, 3, 4], [0, 2,", "5], [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],", "1, 3]], \"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0, 1, 3], [0, 2, 3],", "preferences import Profile self.longMessage = True # and another profile profile = Profile(5)", "profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0, 1, 2, 4]], \"av\": [[0, 1, 2,", "1, 2, 4], [0, 2, 3, 4], [0, 2, 3, 5], [0, 2,", "[1, 2, 3]], \"phrag\": [[0, 1, 3]], \"optphrag\": [[0, 1, 3], [0, 2,", "msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile import rules_approval self.longMessage", "[1, 2, 4, 5]], \"monroe-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "[1, 4], [2, 4], [2, 5], [2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\",", "4, 5]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self):", "5]) p1 = DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None)", "[1, 2], [0, 2, 5], [2], [0, 1, 2, 3, 4], [0, 3,", "3, 5], [0, 2, 4, 5]], \"revseqcc\": [[0, 1, 2, 3], [0, 1,", "= 2 for rule in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always returns", "[1, 3, 4, 5], [2, 3, 4, 5]], \"pav-noilp\": [[0, 1, 4, 5],", 
"rules_approval # all rules used? for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for", "4 preflist = [[0, 3, 4, 5], [1, 2], [0, 2, 5], [2],", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-noilp\": [[0, 1, 2,", "tests2 = { \"seqpav\": [[0, 1, 3]], \"av\": [[0, 1, 2]], \"sav\": [[0,", "[[0, 3, 4, 5], [1, 2], [0, 2, 5], [2], [0, 1, 2,", "4], [0, 2, 3, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0, 1,", "4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\":", "tests1 = { \"seqpav\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "5]], \"rule-x\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4,", "and a third profile profile = Profile(6) committeesize = 4 preflist = [[0,", "\"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "3, 4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0,", "= [[0, 1], [1], [1, 3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile,", "rule x only work with unit weights: continue result = rules_approval.compute_rule(rule, profile, committeesize)", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"sav\":", "[0, 1, 3, 4], [0, 1, 3, 5], [0, 2, 3, 4], [0,", "4], [1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-ilp\": [[0, 1, 2,", "[0], [1, 4, 5], [1], [2, 4, 5], [2], [3, 4, 5], [3]]", "committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0,", "2, 3, 4], [1, 2, 3, 5]], \"revseqcc\": [[0, 1, 2, 3]], \"monroe-ilp\":", "committeesize = 3 preflist = [[0, 1, 2], [0, 1, 2], [0, 1,", "= DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(),", "3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\":", "preflist = [[0, 1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist) for rule in", "5], [0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],", "4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"revseqpav\": [[0, 1,", "3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\": [[0,", "[[0, 1, 2, 3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2,", "\"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0, 1,", "[0, 1, 2, 4], [0, 1, 2, 5], [0, 2, 3, 4], [0,", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"pav-noilp\":", "DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4)", "[0, 3, 4], [1, 2, 3], [1, 3, 4]], \"seqcc\": [[0, 1, 3],", "[1, 2, 4, 5]], \"cc-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "3, 5], [1, 2, 3, 4], [1, 2, 3, 5]], \"revseqcc\": [[0, 1,", "4, 5], [0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-noilp\": [[0, 1, 2, 3],", "1, 2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self, profile, committeesize, tests3)", "1, 2], [0, 1, 3], [0, 2, 3]]) # this test shows that", "3, 4, 5]], \"pav-ilp\": [[0, 1, 4, 5], [0, 2, 4, 5], [0,", "1, 3, 5], [0, 1, 4, 5], [0, 2, 3, 4], [0, 2,", "[1, 3, 4]], \"monroe-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],", "with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with 
self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1", "[2, 3, 4, 5]], } run_test_instance(self, profile, committeesize, tests1) # and now with", "3, 4], [0, 2, 3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1,", "4], [0, 1, 3, 5], [0, 1, 4, 5], [0, 2, 3, 4],", "[0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]], \"slav-noilp\":", "DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4,", "3 preflist = [[0, 1, 2], [0, 1, 2], [0, 1, 2], [0,", "3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile, committeesize,", "[[0, 1, 3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0,", "1, 2], [0, 1, 2], [0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist)", "+ \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile import rules_approval self.longMessage =", "2, 3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0,", "self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import Profile import rules_approval profile =", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"av\": [[0,", "2], [0, 2, 5], [2], [0, 1, 2, 3, 4], [0, 3, 4],", "import unittest def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval # all rules used?", "\"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0, 1, 3]], \"minimaxav-noilp\": [[0, 1, 3], [0,", "reversed preflist preflist.reverse() for p in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self,", "2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1, 3], [0, 2, 3], [0,", "self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5,", "import Profile self.longMessage = True # and a third profile profile = Profile(6)", "preflist = [[0, 4, 5], [0], [1, 4, 5], [1], [2, 4, 5],", "\"greedy-monroe\": # always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule +", "[0, 2, 4, 5]], \"revseqcc\": [[0, 1, 2, 3], [0, 1, 2, 4],", "3], [1, 3], [1, 4], [2, 4], [2, 5], [2, 5]]) committeesize =", "3], [1, 2, 3]], \"phrag\": [[0, 1, 3]], \"optphrag\": [[0, 1, 3], [0,", "1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5], [1, 2,", "committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage = True # and", "5]], } run_test_instance(self, profile, committeesize, tests1) # and now with reversed preflist preflist.reverse()", "def test_mwrules_correct_advanced_2(self): from preferences import Profile self.longMessage = True # and another profile", "profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or", "None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences", "2, 4]], \"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\":", "[1, 2, 4, 5]], \"seqcc\": [[0, 1, 2, 3], [0, 1, 2, 4],", "= rules_approval.compute_rule(rule, 
profile, committeesize) self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self):", "Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2 for rule in rules_approval.MWRULES.keys(): if", "profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2 for rule in rules_approval.MWRULES.keys(): if rule", "4, 5], [2, 3, 4, 5]], \"sav\": [[0, 1, 2, 3], [0, 1,", "import monroescore_flowbased, monroescore_matching self.longMessage = True # and a third profile profile =", "4, 5], [2], [3, 4, 5], [3]] profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0,", "[0, 2, 3, 5], [0, 2, 4, 5], [0, 3, 4, 5], [1,", "\"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\": [[0, 1,", "4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0, 1,", "5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0,", "\"seqpav\": [[0, 1, 2, 4]], \"av\": [[0, 1, 2, 4], [0, 2, 3,", "5]], \"phragmen-enestroem\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4,", "2, 4]], \"optphrag\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "unit weights: continue result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule +", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"phrag\": [[0, 1, 3]],", "3, 4], [0, 1, 3, 5], [0, 1, 4, 5], [0, 2, 3,", "4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-noilp\": [[0, 1,", "[1], [1, 2], [0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile,", "4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1,", "3, 4, 5], [2, 3, 4, 5]], \"phrag\": [[0, 1, 4, 5], [0,", "def test_monroescore(self): from preferences import Profile from score_functions import monroescore_flowbased, monroescore_matching self.longMessage =", "\"revseqpav\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "4], [2, 4], [2, 5], [2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile,", "profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences import", "with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5]) p2 =", "from preferences import Profile self.longMessage = True # and a third profile profile", "rule in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule]", "test_mwrules_correct_simple(self): from preferences import Profile import rules_approval self.longMessage = True profile = Profile(4)", "rules_approval self.longMessage = True profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize", "# this test shows that tiebreaking is not (yet) # implemented for opt-Phragmen", "2, 4], [0, 1, 2, 5], [0, 2, 3, 4], [0, 2, 3,", "with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"])", "profile profile = Profile(6) committeesize = 4 preflist = [[0, 3, 4, 5],", "[1, 3, 
4, 5], [2, 3, 4, 5]], \"revseqpav\": [[0, 1, 4, 5],", "3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1,", "p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from preferences", "1, 2, 5], [0, 1, 3, 4], [0, 1, 3, 5], [0, 1,", "3, 4], [0, 2, 3, 5], [0, 2, 4, 5]], \"revseqcc\": [[0, 1,", "6, msg=rule + \" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)),", "self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from preferences import Profile", "= DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0,", "[1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-noilp\": [[0, 1, 2, 3],", "\"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0, 1,", "2], [0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2],", "[2, 3, 4, 5]], \"sav\": [[0, 1, 2, 3], [0, 1, 2, 4],", "4, 5], [0, 2, 4, 5], [0, 3, 4, 5], [1, 2, 4,", "4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], } run_test_instance(self, profile,", "for p in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1)", "5], [0, 1, 3, 4], [0, 1, 3, 5], [0, 2, 3, 4],", "rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize =", "3, 5], [1, 2, 4, 5]], \"seqcc\": [[0, 1, 2, 3], [0, 1,", "[0, 2, 5], [2], [0, 1, 2, 3, 4], [0, 3, 4], [0,", "[0, 2, 3], [1, 2, 3]], \"cc-ilp\": [[0, 1, 3], [0, 2, 3],", "self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1,", "3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0,", "Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys():", "# and a third profile profile = Profile(6) committeesize = 4 preflist =", "3, 4], [0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3,", "3, 4], [1, 2, 3], [1, 3, 4]], \"monroe-ilp\": [[0, 1, 3], [0,", "weights: continue result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule + \"", "[[0, 1, 3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\": [[0,", "2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile,", "4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phrag\": [[0, 1,", "2, 3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0,", "3, 4]], \"revseqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1,", "2, 3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1,", "3], [1, 2, 3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\":", "1, 2, 4]], } run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from preferences import", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"pav-ilp\":", "used? 
for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule in tests.keys(): output", "1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]],", "[[0, 1, 2, 4], [0, 2, 3, 4]], \"sav\": [[0, 1, 2, 4]],", "Profile self.longMessage = True # and a third profile profile = Profile(6) committeesize", "[0, 2, 3, 4], [0, 2, 3, 5], [1, 2, 3, 4], [1,", "self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1,", "[1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1, 3], [0, 2, 3],", "in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] +", "= Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in", "\"pav-ilp\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "4, 5]], \"phrag\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3,", "3, 5], [1, 2, 4, 5]], \"monroe-noilp\": [[0, 1, 2, 3], [0, 1,", "profile profile = Profile(6) preflist = [[0, 1], [1], [1, 3], [4], [2],", "4, 5]], \"cc-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "[[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4], [0,", "1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1,", "\" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule +", "[1], [3]]) committeesize = 3 for ilp in [True, False]: # max Monroe", "for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in rule: #", "3, 4]], \"monroe-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"monroe-noilp\":", "unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule],", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"minimaxav-ilp\": [[0, 1, 3],", "[0, 2, 4, 5]], \"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0, 1, 2,", "2, 4]], } run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from preferences import Profile", "2], [1, 2], [1], [3]]) committeesize = 3 for ilp in [True, False]:", "3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0,", "5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5)", "rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def test_mwrules_correct_simple(self): from", "4 profile = Profile(6) preflist = [[0, 4, 5], [0], [1, 4, 5],", "3], [1, 2, 3]], \"minimaxav-ilp\": [[0, 1, 3], [0, 2, 3], [1, 2,", "[1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-ilp\":", "Profile from preferences import DichotomousPreferences import rules_approval self.longMessage = True profile = Profile(3)", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\":", "2, 3, 5], [0, 2, 4, 5]], \"revseqcc\": [[0, 1, 2, 3], [0,", "committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0, 1, 3], [0, 2, 3]]) #", "5], [2, 3, 4, 5]], \"revseqpav\": [[0, 1, 4, 5], [0, 2, 4,", "test_monroe_indivisible(self): from preferences import 
Profile import rules_approval self.longMessage = True profile = Profile(4)", "[1, 2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0, 1, 2, 3],", "[1, 3, 4, 5], [2, 3, 4, 5]], \"sav\": [[0, 1, 2, 3],", "2], [0, 1, 3], [0, 2, 3]]) # this test shows that tiebreaking", "1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1,", "self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1,", "2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def", "rules_approval profile = Profile(5) committeesize = 4 preflist = [[0, 1, 2], [1],", "in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self):", "[0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5], [1,", "failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile from preferences", "\"slav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "+ \" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule", "4], [2, 5], [2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)),", "3], [0, 2, 3], [1, 2, 3]], \"monroe-noilp\": [[0, 1, 3], [0, 2,", "3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"monroe-ilp\": [[0, 1,", "3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"seqcc\": [[0, 1,", "[2, 5], [2, 5]]) committeesize = 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12)", "2], [1], [3]]) committeesize = 3 for ilp in [True, False]: # max", "import Profile import rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [1], [2],", "committeesize = 4 preflist = [[0, 3, 4, 5], [1, 2], [0, 2,", "= True profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1],", "preferences import Profile import rules_approval self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [1],", "4, 5]], \"seqcc\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "5], [2, 3, 4, 5]], \"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0, 1,", "\"slav-noilp\": [[0, 1, 3]], \"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], }", "4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5],", "\"seqpav\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "2, 3, 4]], \"slav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "[3]] profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0, 1, 4, 5], [0, 2, 4,", "\"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": [[0, 1, 3]], \"rule-x\":", "1, 3], [0, 2, 3], [1, 2, 3]], \"phrag\": [[0, 1, 3]], \"optphrag\":", "test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage = True # and a third profile", "5]], \"pav-noilp\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4,", "from preferences import DichotomousPreferences num_cand = 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4,", "True profile = Profile(3) 
profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for", "from preferences import Profile from score_functions import monroescore_flowbased, monroescore_matching self.longMessage = True #", "[1], [2], [3]]) committeesize = 2 for rule in rules_approval.MWRULES.keys(): if rule ==", "preferences import Profile import rules_approval profile = Profile(5) committeesize = 4 preflist =", "2, 3, 4], [1, 2, 3, 5]], \"seqslav\": [[0, 1, 2, 4], [0,", "= True # and another profile profile = Profile(5) committeesize = 3 preflist", "5], [0, 2, 4, 5]], \"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0, 1,", "rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] + \" failed with", "self.longMessage = True profile = Profile(6) profile.add_preferences([[0], [0], [1, 3], [1, 3], [1,", "1, 2], [0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = {", "5], [1, 2], [0, 2, 5], [2], [0, 1, 2, 3, 4], [0,", "True # and a third profile profile = Profile(6) committeesize = 4 preflist", "+ \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile", "profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2 for rule in", "[0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5], [1,", "[2, 3, 4, 5]], \"phrag\": [[0, 1, 4, 5], [0, 2, 4, 5],", "5]], \"seqcc\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2,", "unittest def run_test_instance(unittestinstance, profile, committeesize, tests): import rules_approval # all rules used? 
for", "test shows that tiebreaking is not (yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self):", "3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"revseqcc\": [[0, 1,", "= True # and a third profile profile = Profile(6) committeesize = 4", "[1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]),", "[1, 3, 4, 5], [2, 3, 4, 5]], } run_test_instance(self, profile, committeesize, tests1)", "\" failed\") output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule]", "\"rule-x\": [[0, 1, 3]], \"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile, committeesize, tests2)", "2, 4]], \"pav-ilp\": [[0, 1, 2, 4]], \"pav-noilp\": [[0, 1, 2, 4]], \"revseqpav\":", "2, 5], [0, 1, 3, 4], [0, 1, 3, 5], [0, 1, 4,", "profile = Profile(6) preflist = [[0, 1], [1], [1, 3], [4], [2], [1,", "4, 5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None)", "profile = Profile(5) committeesize = 3 preflist = [[0, 1, 2], [0, 1,", "[1, 2, 3, 5]], \"rule-x\": [[0, 1, 4, 5], [0, 2, 4, 5],", "3, 4], [1, 2, 3, 5]], \"rule-x\": [[0, 1, 4, 5], [0, 2,", "Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0,", "failed\") output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output), 1, msg=rules_approval.MWRULES[rule] +", "[0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]], \"revseqcc\":", "[0], [0], [1, 2], [1, 2], [1], [3]]) committeesize = 3 for ilp", "tests.keys()) for rule in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output,", "[[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\": [[0, 1, 2,", "2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0, 1, 2, 3], [0,", "3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1,", "2, 4, 5]], \"phrag\": [[0, 1, 2, 4]], \"optphrag\": [[0, 1, 2, 3],", "rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def", "\"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\": [[0, 1,", "Profile(6) committeesize = 4 preflist = [[0, 3, 4, 5], [1, 2], [0,", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"sav\": [[0,", "rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \" failed with resolute=True\") def", "3]], \"optphrag\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"cc-ilp\": [[0,", "2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5], [1, 2,", "2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import Profile import rules_approval profile", "4, 5], [2, 3, 4, 5]], \"pav-ilp\": [[0, 1, 4, 5], [0, 2,", "3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1, 
3]], \"slav-ilp\": [[0, 1, 3]],", "test_createprofiles(self): from preferences import Profile from preferences import DichotomousPreferences num_cand = 7 prof", "one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \" failed\") for rule", "3, 4], [1, 2, 3], [1, 3, 4]], \"cc-noilp\": [[0, 1, 3], [0,", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"pav-noilp\": [[0,", "4]], \"revseqcc\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1, 2,", "self.longMessage = True profile = Profile(4) profile.add_preferences([[0], [1], [2], [3]]) committeesize = 2", "test_monroescore(self): from preferences import Profile from score_functions import monroescore_flowbased, monroescore_matching self.longMessage = True", "3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0, 1, 2, 3], [0, 1,", "4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0,", "5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2,", "3, 4], [0, 3, 4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3 =", "2, 4]], \"phragmen-enestroem\": [[0, 1, 2, 4]], } run_test_instance(self, profile, committeesize, tests3) def", "3, 5], [0, 1, 4, 5], [0, 2, 3, 4], [0, 2, 3,", "Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1], [3]]) committeesize = 3", "5], [2, 3, 4, 5]], \"pav-noilp\": [[0, 1, 4, 5], [0, 2, 4,", "rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in rule: # Monroe and rule", "output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\")", "opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval import compute_rule self.longMessage =", "2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "1, msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self): from preferences import Profile", "[0, 2, 3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0, 1, 2,", "1, 3]], } run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences import Profile", "\"monroe-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-noilp\": [[0,", "[0], [1, 3], [1, 3], [1, 4], [2, 4], [2, 5], [2, 5]])", "[[0, 1, 2, 4]], } run_test_instance(self, profile, committeesize, tests3) def test_monroescore(self): from preferences", "2, 3]], \"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\": [[0, 1, 2, 4], [0,", "5], [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5],", "Profile(5) committeesize = 3 preflist = [[0, 1, 2], [0, 1, 2], [0,", "3, 5], [1, 2, 4, 5]], \"cc-noilp\": [[0, 1, 2, 3], [0, 1,", "2, 3], [1, 2, 3]], \"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1, 3]],", "returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \" failed\") for", "= [[0, 1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys():", "3]], \"phragmen-enestroem\": [[0, 1, 3]], } run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from", "2, 3, 4], [0, 2, 3, 5], [1, 2, 3, 4], [1, 2,", "[2, 3, 4, 5]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3)", "2, 4, 5]], \"cc-noilp\": [[0, 1, 2, 3], [0, 
1, 2, 4], [0,", "2, 4, 5]], \"monroe-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "[1, 3, 4, 5], [2, 3, 4, 5]], \"pav-ilp\": [[0, 1, 4, 5],", "TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile from preferences import DichotomousPreferences num_cand =", "3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile,", "self.longMessage = True committeesize = 4 profile = Profile(6) preflist = [[0, 4,", "always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \" failed\")", "4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"sav\": [[0, 1,", "in tests.keys()) for rule in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize, resolute=False) unittestinstance.assertEqual(", "True profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1], [3]])", "\"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0, 1, 2, 4]], \"phragmen-enestroem\": [[0, 1,", "5], [1, 2, 4, 5]], \"seqcc\": [[0, 1, 2, 3], [0, 1, 2,", "self.longMessage = True # and a third profile profile = Profile(6) committeesize =", "[0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception):", "1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\" in rule:", "\"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5]) p2", "1, 2, 5], [0, 1, 3, 4], [0, 1, 3, 5], [0, 2,", "preflist = [[0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1,", "or \"rule-x\" in rule: # Monroe and rule x only work with unit", "= 4 preflist = [[0, 3, 4, 5], [1, 2], [0, 2, 5],", "3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\": [[0, 1,", "output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True)", "[1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"cc-ilp\":", "Profile(5) committeesize = 4 preflist = [[0, 1, 2], [1], [1, 2], [0]]", "in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \" failed with resolute=True\")", "1, 3]], \"minimaxav-noilp\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"minimaxav-ilp\":", "1, 2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1, 2, 3]],", "[[0, 1, 2, 3], [0, 1, 2, 4], [0, 2, 3, 4], [0,", "4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0, 1, 4, 5], [0, 2,", "\"seqpav\": [[0, 1, 3]], \"av\": [[0, 1, 2]], \"sav\": [[0, 1, 3]], \"pav-ilp\":", "2, 3]], \"greedy-monroe\": [[0, 2, 3, 4]], \"slav-ilp\": [[0, 1, 2, 3], [0,", "4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "2, 3, 4], [0, 3, 4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3", "= [[0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2],", "4, 5], [1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4,", "failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \"", "{ \"seqpav\": [[0, 1, 3]], \"av\": [[0, 1, 2]], \"sav\": [[0, 1, 3]],", "= 4 profile = Profile(6) preflist = [[0, 4, 5], [0], [1, 4,", "3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\": 
[[0, 1, 3]],", "max Monroe score is 6 (even for committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile,", "= 3 for ilp in [True, False]: # max Monroe score is 6", "2, 4]], \"av\": [[0, 1, 2, 4], [0, 2, 3, 4]], \"sav\": [[0,", "1, 3], [0, 2, 3], [1, 2, 3]], \"minimaxav-ilp\": [[0, 1, 3], [0,", "2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5]], \"phrag\": [[0,", "with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class", "3, 4], [1, 2, 3, 5]], \"slav-noilp\": [[0, 1, 2, 3], [0, 1,", "[[0, 1, 2], [0, 1, 3], [0, 2, 3]]) # this test shows", "3, 4], [1, 2, 3, 5]], \"revseqcc\": [[0, 1, 2, 3]], \"monroe-ilp\": [[0,", "4, 5], [1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4,", "3], [0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4],", "[1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5], [1,", "implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile from rules_approval import compute_rule", "result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule + \" failed\"+str(result)) def", "[0, 1, 2], [0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 =", "rules_approval.compute_monroe(profile, committeesize, ilp=ilp, resolute=False), [[0, 1, 2], [0, 1, 3], [0, 2, 3]])", "4], [0, 2, 3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2,", "4, 5]], \"revseqcc\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "\"sav\": [[0, 1, 3]], \"pav-ilp\": [[0, 1, 3]], \"pav-noilp\": [[0, 1, 3]], \"revseqpav\":", "1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5], [0, 1,", "3, 4], [0, 2, 3, 5], [0, 2, 4, 5]], \"phrag\": [[0, 1,", "\" failed with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with", "monroescore_matching self.longMessage = True # and a third profile profile = Profile(6) preflist", "is not (yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import Profile", "1, 2, 3]], \"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]],", "another profile profile = Profile(5) committeesize = 3 preflist = [[0, 1, 2],", "2, 4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 2,", "4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\": [[0, 1, 2, 4]], \"rule-x\": [[0,", "5], [2, 3, 4, 5]], \"pav-ilp\": [[0, 1, 4, 5], [0, 2, 4,", "profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\"", "3, 5], [1, 2, 4, 5]], \"monroe-ilp\": [[0, 1, 2, 3], [0, 1,", "in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule,", "# max Monroe score is 6 (even for committee [0, 1, 3]) self.assertEqual(", "[[0, 4, 5], [0], [1, 4, 5], [1], [2, 4, 5], [2], [3,", "tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences", "def test_mwrules__toofewcandidates(self): from preferences import Profile import rules_approval profile = Profile(5) committeesize =", "self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage", "2, 3, 4], [1, 2, 3, 5]], 
\"slav-noilp\": [[0, 1, 2, 3], [0,", "resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage = True committeesize =", "4) self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if __name__", "3, 4], [0, 2, 4], [0, 1]] profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0,", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2,", "rules_approval import compute_rule self.longMessage = True profile = Profile(6) profile.add_preferences([[0], [0], [1, 3],", "[[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2, 3,", "profile = Profile(4) profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1], [3]]) committeesize", "[[0, 2, 3, 4]], \"slav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4],", "5], [1, 2, 4, 5]], \"greedy-monroe\": [[0, 1, 2, 3]], \"seqslav\": [[0, 1,", "3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_matching(profile,", "4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1, 3]], \"av\":", "profile, committeesize, resolute=True)), 1, msg=rule + \" failed with resolute=True\") def test_monroe_indivisible(self): from", "2, 3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"revseqcc\": [[0,", "[0, 1, 3], [0, 2, 3]]) # this test shows that tiebreaking is", "3]], \"cc-ilp\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1, 2,", "3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5], [1, 2,", "= 3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import", "5], [2, 3, 4, 5]], \"phrag\": [[0, 1, 4, 5], [0, 2, 4,", "4], [0, 2, 3, 5], [0, 2, 4, 5]], \"revseqcc\": [[0, 1, 2,", "4]], \"cc-noilp\": [[0, 1, 3], [0, 2, 3], [0, 3, 4], [1, 2,", "} run_test_instance(self, profile, committeesize, tests1) # and now with reversed preflist preflist.reverse() for", "[[0, 1, 2, 4]], \"optphrag\": [[0, 1, 2, 3], [0, 1, 2, 4],", "committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \" failed\") for rule in", "\"phrag\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "} run_test_instance(self, profile, committeesize, tests2) def test_mwrules_correct_advanced_3(self): from preferences import Profile self.longMessage =", "4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 2,", "self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import", "rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize, resolute=True)), 1, msg=rule + \" failed with", "[0, 1]] profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0, 1, 2, 4]], \"av\": [[0,", "4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 2, 3,", "2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1, 2, 4]], \"slav-noilp\":", "1, 2, 4], [0, 1, 2, 5], [0, 2, 3, 4], [0, 2,", "12) def test_mwrules_correct_advanced_1(self): from preferences import Profile self.longMessage = True committeesize = 4", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phragmen-enestroem\": [[0,", "2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]], \"slav-noilp\": [[0,", "rule or \"rule-x\" in rule: # Monroe and rule x only work 
with", "[3]]) committeesize = 3 for ilp in [True, False]: # max Monroe score", "4, 5]], \"sav\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "3, 4, 5], [2, 3, 4, 5]], \"pav-ilp\": [[0, 1, 4, 5], [0,", "1, 2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1, 2, 4]],", "\" failed with resolute=True\") class TestApprovalMultiwinner(unittest.TestCase): def test_createprofiles(self): from preferences import Profile from", "3 for ilp in [True, False]: # max Monroe score is 6 (even", "2, 3]], \"seqcc\": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1,", "# and another profile profile = Profile(5) committeesize = 3 preflist = [[0,", "[1, 2, 3, 4], [1, 2, 3, 5]], \"seqslav\": [[0, 1, 2, 4],", "this test shows that tiebreaking is not (yet) # implemented for opt-Phragmen def", "3, 5], [0, 2, 4, 5], [1, 2, 3, 4], [1, 2, 3,", "rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule in tests.keys(): output = rules_approval.compute_rule(rule, profile, committeesize,", "2], [0, 1], [3, 4], [3, 4], [3]] profile.add_preferences(preflist) tests2 = { \"seqpav\":", "[3]] profile.add_preferences(preflist) tests2 = { \"seqpav\": [[0, 1, 3]], \"av\": [[0, 1, 2]],", "failed with resolute=True\") unittestinstance.assertTrue( output[0] in tests[rule], msg=rules_approval.MWRULES[rule] + \" failed with resolute=True\")", "in rule: # Monroe and rule x only work with unit weights: continue", "2, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5], [1, 2,", "rule in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always returns one committee continue", "Profile(6) profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4], [2, 4], [2, 5],", "3, 5]], \"seqslav\": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1,", "Profile self.longMessage = True committeesize = 4 profile = Profile(6) preflist = [[0,", "3, 2]), 5) self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4)", "import Profile import rules_approval profile = Profile(5) committeesize = 4 preflist = [[0,", "{ \"seqpav\": [[0, 1, 2, 4]], \"av\": [[0, 1, 2, 4], [0, 2,", "4], [1, 2, 3, 5]], \"slav-noilp\": [[0, 1, 2, 3], [0, 1, 2,", "4, 5], 2.4)) self.assertFalse(prof.has_unit_weights()) self.assertEqual(prof.totalweight(), 6.4) def test_mwrules__toofewcandidates(self): from preferences import Profile import", "[1, 2, 3, 4], [1, 2, 3, 5], [1, 2, 4, 5]], \"greedy-monroe\":", "2, 3], [0, 1, 2, 4], [0, 1, 2, 5], [0, 2, 3,", "import compute_rule self.longMessage = True profile = Profile(6) profile.add_preferences([[0], [0], [1, 3], [1,", "monroescore_flowbased, monroescore_matching self.longMessage = True # and a third profile profile = Profile(6)", "preferences import Profile from preferences import DichotomousPreferences num_cand = 7 prof = Profile(num_cand)", "5]], \"seqslav\": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3,", "self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with", "[2, 3, 4, 5]], \"pav-noilp\": [[0, 1, 4, 5], [0, 2, 4, 5],", "[1, 3, 4]], \"cc-noilp\": [[0, 1, 3], [0, 2, 3], [0, 3, 4],", "\"optphrag\": [[0, 1, 2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1,", "def test_createprofiles(self): from preferences import Profile from preferences import DichotomousPreferences num_cand = 7", "prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): 
prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception):", "5]], \"monroe-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2,", "5], [0, 1, 4, 5], [0, 2, 3, 4], [0, 2, 3, 5],", "[0, 1, 2, 4], [0, 2, 3, 4], [0, 2, 3, 5], [0,", "1], [1], [1, 3], [4], [2], [1, 5, 3]] profile.add_preferences(preflist) self.assertEqual(monroescore_flowbased(profile, [1, 3,", "self.longMessage = True # and a third profile profile = Profile(6) preflist =", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"revseqpav\": [[0,", "5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4, 5]) p1 = DichotomousPreferences([0, 4, 5])", "def test_mwrules_weightsconsidered(self): from preferences import Profile from preferences import DichotomousPreferences import rules_approval self.longMessage", "2, 4], [0, 1]] profile.add_preferences(preflist) tests3 = { \"seqpav\": [[0, 1, 2, 4]],", "unittestinstance.assertEqual( output, tests[rule], msg=rules_approval.MWRULES[rule] + \" failed\") output = rules_approval.compute_rule( rule, profile, committeesize,", "committeesize = 1 for rule in rules_approval.MWRULES.keys(): if \"monroe\" in rule or \"rule-x\"", "4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if __name__ == '__main__': unittest.main()", "[2], [3, 4, 5], [3]] profile.add_preferences(preflist) tests1 = { \"seqpav\": [[0, 1, 4,", "= Profile(6) preflist = [[0, 1], [1], [1, 3], [4], [2], [1, 5,", "[[0, 1, 3]], \"av\": [[0, 1, 2]], \"sav\": [[0, 1, 3]], \"pav-ilp\": [[0,", "4]], \"av\": [[0, 1, 2, 4], [0, 2, 3, 4]], \"sav\": [[0, 1,", "2, 4, 5]], \"revseqcc\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile,", "3], [1, 4], [2, 4], [2, 5], [2, 5]]) committeesize = 3 self.assertEqual(", "4, 5]], \"pav-noilp\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3,", "[2, 4, 5]), 3) self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3) if __name__ == '__main__':", "tiebreaking is not (yet) # implemented for opt-Phragmen def test_optphrag_notiebreaking(self): from preferences import", "[[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"cc-ilp\": [[0, 1, 3],", "5) self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4) self.assertEqual(monroescore_flowbased(profile, [2,", "2, 4]], \"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0,", "4], [1, 2, 3, 5]], \"rule-x\": [[0, 1, 4, 5], [0, 2, 4,", "4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"optphrag\": [[0, 1,", "committeesize)), 6, msg=rule + \" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize,", "[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"revseqpav\":", "4], [0, 1, 2, 5], [0, 1, 3, 4], [0, 1, 3, 5],", "\"optphrag\": [[0, 1, 3], [0, 2, 3], [1, 2, 3]], \"cc-ilp\": [[0, 1,", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"phrag\": [[0,", "\"greedy-monroe\": [[0, 1, 3]], \"seqslav\": [[0, 1, 3]], \"slav-ilp\": [[0, 1, 3]], \"slav-noilp\":", "3 self.assertEqual( len(compute_rule(\"optphrag\", profile, committeesize, resolute=False)), 12) def test_mwrules_correct_advanced_1(self): from preferences import Profile", "3, 5]], \"slav-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1,", "in rules_approval.MWRULES.keys(): with self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize) with self.assertRaises(Exception): 
rules_approval.compute_rule(rule, profile, committeesize, resolute=True)", "\"pav-noilp\": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],", "5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4, 5, \"1\"]) with self.assertRaises(Exception): prof.add_preferences([\"1\", 0, 4,", "\"monroe\" in rule or \"rule-x\" in rule: # Monroe and rule x only", "with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0,", "profile = Profile(3) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule", "= Profile(6) profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4], [2, 4], [2,", "1, 2, 3]], \"seqcc\": [[0, 1, 2, 4], [0, 1, 2, 5], [0,", "profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([0])) profile.add_preferences(DichotomousPreferences([1], 5)) profile.add_preferences(DichotomousPreferences([0])) committeesize = 1 for rule in rules_approval.MWRULES.keys(): if", "a third profile profile = Profile(6) preflist = [[0, 1], [1], [1, 3],", "2, 4, 5]], \"monroe-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)), 6, msg=rule + \" failed\") for rule in rules_approval.MWRULES.keys():", "5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4))", "5], [1, 2, 3, 4], [1, 2, 3, 5]], \"slav-noilp\": [[0, 1, 2,", "[1, 2, 3, 5], [1, 2, 4, 5]], \"monroe-ilp\": [[0, 1, 2, 3],", "[0, 2, 3, 5], [0, 2, 4, 5]], \"phrag\": [[0, 1, 2, 4]],", "for rule in rules_approval.MWRULES.keys(): if rule == \"greedy-monroe\": # always returns one committee", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"optphrag\": [[0, 1, 2,", "4]], \"revseqpav\": [[0, 1, 2, 4]], \"minimaxav-noilp\": [[0, 1, 2, 3], [0, 1,", "4, 5], [2, 3, 4, 5]], \"av\": [[0, 1, 4, 5], [0, 2,", "num_cand = 7 prof = Profile(num_cand) self.assertEqual(prof.add_preferences( DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception):", "preflist preflist.reverse() for p in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile,", "5], [0, 2, 4, 5], [0, 3, 4, 5], [1, 2, 3, 4],", "DichotomousPreferences([0, 4, 5])), None) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]),", "rules used? 
for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule in tests.keys():", "1, 3, 4], [0, 1, 3, 5], [0, 1, 4, 5], [0, 2,", "1, 2], [1], [1, 2], [0]] profile.add_preferences(preflist) for rule in rules_approval.MWRULES.keys(): with self.assertRaises(Exception):", "test_mwrules__toofewcandidates(self): from preferences import Profile import rules_approval profile = Profile(5) committeesize = 4", "if rule == \"greedy-monroe\": # always returns one committee continue self.assertEqual(len(rules_approval.compute_rule(rule, profile, committeesize)),", "[[0, 1, 2, 3]], \"seqslav\": [[0, 1, 2, 4]], \"slav-ilp\": [[0, 1, 2,", "[0, 2, 3], [0, 3, 4], [1, 2, 3], [1, 3, 4]], \"monroe-ilp\":", "3, 4, 5]], \"minimaxav-ilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0,", "3], [0, 1, 2, 4], [0, 2, 3, 4], [0, 2, 3, 5],", "in rule or \"rule-x\" in rule: # Monroe and rule x only work", "self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([num_cand])) with self.assertRaises(Exception): prof.add_preferences(DichotomousPreferences([-1])) self.assertEqual(prof.add_preferences([0, 4, 5]), None) with self.assertRaises(Exception): prof.add_preferences([0, 4,", "5]], \"cc-noilp\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2,", "2, 3]], \"cc-ilp\": [[0, 1, 2, 3]], \"cc-noilp\": [[0, 1, 2, 3]], \"seqcc\":", "3, 4], [1, 2, 3, 5]], \"seqslav\": [[0, 1, 2, 4], [0, 1,", "profile, committeesize)), 6, msg=rule + \" failed\") for rule in rules_approval.MWRULES.keys(): self.assertEqual(len(rules_approval.compute_rule(rule, profile,", "\"monroe-ilp\": [[0, 1, 2, 3]], \"monroe-noilp\": [[0, 1, 2, 3]], \"greedy-monroe\": [[0, 2,", "\"optphrag\": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],", "4, 5], [2, 3, 4, 5]], \"phrag\": [[0, 1, 4, 5], [0, 2,", "score is 6 (even for committee [0, 1, 3]) self.assertEqual( rules_approval.compute_monroe(profile, committeesize, ilp=ilp,", "with reversed preflist preflist.reverse() for p in preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist)", "committeesize = 4 profile = Profile(6) preflist = [[0, 4, 5], [0], [1,", "p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights()) prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4)) self.assertFalse(prof.has_unit_weights())", "2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5]], \"minimaxav-ilp\": [[0,", "2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]], \"minimaxav-noilp\": [[0,", "preflist: p.reverse() profile = Profile(6) profile.add_preferences(preflist) run_test_instance(self, profile, committeesize, tests1) def test_mwrules_correct_advanced_2(self): from", "1, 3], [0, 2, 3], [0, 3, 4], [1, 2, 3], [1, 3,", "p1 = DichotomousPreferences([0, 4, 5]) p2 = DichotomousPreferences([1, 2]) self.assertEqual(prof.add_preferences([p1, p2]), None) self.assertTrue(prof.has_unit_weights())", "all rules used? 
for rule in rules_approval.MWRULES: unittestinstance.assertTrue(rule in tests.keys()) for rule in", "msg=rules_approval.MWRULES[rule] + \" failed\") output = rules_approval.compute_rule( rule, profile, committeesize, resolute=True) unittestinstance.assertEqual( len(output),", "1, 3, 4], [0, 1, 3, 5], [0, 2, 3, 4], [0, 2,", "2, 3, 4]], \"sav\": [[0, 1, 2, 4]], \"pav-ilp\": [[0, 1, 2, 4]],", "and now with reversed preflist preflist.reverse() for p in preflist: p.reverse() profile =", "with unit weights: continue result = rules_approval.compute_rule(rule, profile, committeesize) self.assertTrue([1] in result, msg=rule", "4], [0, 1, 2, 5], [0, 2, 3, 4], [0, 2, 3, 5],", "4, 5], [1, 2], [0, 2, 5], [2], [0, 1, 2, 3, 4],", "self.assertRaises(Exception): rules_approval.compute_rule(rule, profile, committeesize, resolute=True) def test_mwrules_weightsconsidered(self): from preferences import Profile from preferences", "5], [1, 3, 4, 5], [2, 3, 4, 5]], \"av\": [[0, 1, 4," ]
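The fragments above repeatedly exercise monroescore_flowbased and monroescore_matching on a 6-voter profile with committees of size 3 (expected scores 5, 4 and 3). As a reading aid, here is a minimal, self-contained sketch of a matching-based Monroe score. It is not the score_functions implementation from those fragments: it assumes the number of voters is divisible by the committee size (true for that example), and the helper name monroescore_matching_sketch is made up for illustration.

import numpy as np
from scipy.optimize import linear_sum_assignment

def monroescore_matching_sketch(approval_sets, committee):
    # approval_sets: one set of approved candidates per voter.
    # committee: candidate list whose length divides the number of voters.
    num_voters = len(approval_sets)
    seats_per_member = num_voters // len(committee)
    assert seats_per_member * len(committee) == num_voters
    # Duplicate every committee member into equally many "seats", so a full
    # assignment gives each member exactly num_voters / len(committee)
    # voters, as the Monroe criterion requires in this divisible case.
    seats = [cand for cand in committee for _ in range(seats_per_member)]
    # cost is -1 when the voter approves the seat's candidate, 0 otherwise;
    # minimising total cost maximises the number of represented voters.
    cost = np.zeros((num_voters, len(seats)), dtype=int)
    for i, approved in enumerate(approval_sets):
        for j, cand in enumerate(seats):
            if cand in approved:
                cost[i, j] = -1
    rows, cols = linear_sum_assignment(cost)
    return int(-cost[rows, cols].sum())

# Matches the expected values asserted in the fragments above.
voters = [{0, 1}, {1}, {1, 3}, {4}, {2}, {1, 5, 3}]
assert monroescore_matching_sketch(voters, [1, 3, 2]) == 5
assert monroescore_matching_sketch(voters, [2, 1, 5]) == 4
assert monroescore_matching_sketch(voters, [2, 4, 5]) == 3

The flow-based variant tested in the same fragments is asserted to return the same numbers on these committees; the assignment formulation above is simply the matching view of that balanced-representation constraint.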
[ "== ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month)", "'139409', '139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509',", "'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio']", "index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str}", "portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[", "conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() )", "index_df['date'].str[:6].unique().tolist() # The list of months that we need for calculating market cap", "'139807', '139808', '139809', '139810', '139811', '139812' ] # The list of months that", "(merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split", "ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio']", "= pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] #", "to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded", "portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size)", "Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11],", "= pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] +", "'139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610',", "all 75 tickers' data me_list = [] for file_number in range(1, 76): print(file_number)", "portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions,", "non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last()", "portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio", "rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path,", "price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a NaN", "labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict()", "df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df =", "'139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606',", 
"me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')]", "= [] for month in mom_months: # Check t-13 price condition and t-1", "return for month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price'])", "= portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s =", "= (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh)", "]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for", "data for calculating returns close_list = [] for file_number in range(1, 76): rahavard_path", "pd import numpy as np import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview", "[ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size", "list of all months months = index_df['date'].str[:6].unique().tolist() # The list of months that", "Split each me portfolio into 3 MOM group q = [0, .3, .7,", "ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[", "jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1,", "'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date']", ") index_df.dropna(inplace=True) # The list of all months months = index_df['date'].str[:6].unique().tolist() # The", "= pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers", "# Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number)", "Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh =", "portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size']", "for calculating returns close_list = [] for file_number in range(1, 76): rahavard_path =", "add 2 index to the df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates", "ME into two groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), (", "portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B', 'S']).tolist()", "df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df", "= ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom =", "]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio", "dates me_df = 
me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'],", "me_df, on=['ticker_num', 'date']) # First, create a NaN column, and then add t-13", "'139900')] # Read index df for indicating open market days index_path = r'E:\\Thesis\\New", "'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') &", "and add it to a list mom = ( ((sh_return + bh_return) /", "'139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708',", "<reponame>behnoud-bazrafshan/ThesisPortfolio import pandas as pd import numpy as np import jdatetime pd.options.mode.chained_assignment =", "= np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate", "file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx'", "me_list = [] for file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily", "skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change order from", "'139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701',", "pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] # Read", "1 ) mom_list = [] for month in mom_months: # Check t-13 price", "month t # Set conditions month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh)", "'139809', '139810', '139811', '139812' ] # The list of months that we need", "MOM mom_months = me_months[1:] # Merge market cap and price dfs merged_df =", "df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d')", "portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio in month", "monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df", "'size', portfolio_size) # Split each me portfolio into 3 MOM group q =", "'139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604',", "'139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for calculating returns close_list =", "(df['date'] <= '139900')] # Read index df for indicating open market days index_path", "= [ '139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410',", "& bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio", "names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index reading problem, pandas", "shamsi dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe", "& sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return =", "Convert to shamsi dates df['date'] = df['date'].apply( lambda x: 
jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create", "pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s,", "ticker in range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date'] ==", "= pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a NaN column, and then", "'open', 'market_cap'], na_values='-' ) # Change order from old to new dates me_df", "t_13_condtion = (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price =", "q = [0, .3, .7, 1] labels = ['L', 'M', 'H'] x_b =", "= (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price", "need for calculating market cap me_months = [ '139312', '139401', '139402', '139403', '139404',", "df for indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df =", "into 3 MOM group q = [0, .3, .7, 1] labels = ['L',", "& (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for calculating", "in mom_months: # Find t-13 prices for ticker in range(1, 76): t_13 =", "previous_month = me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion &", "sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) #", "'139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506',", "portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio into 3 MOM group q", "(merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition", "75 tickers' data me_list = [] for file_number in range(1, 76): print(file_number) me_path", "days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df", ") ] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each", "t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month in mom_months: # Find t-13", "month in mom_months: # Check t-13 price condition and t-1 market cap condition", "portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh =", "(me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for calculating returns", "dtype={'date': str}, parse_dates=[0] ) # Solve index reading problem, pandas add 2 index", "== month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition =", "months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] == ticker)", "na_values='-' ) # Change order from old to new dates me_df = me_df[::-1].reset_index(drop=True)", "3 MOM group q = [0, .3, .7, 1] labels = ['L', 'M',", "bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, 
weights=bl_portfolio.market_cap ) sh_return", "Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) #", "pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list of all", "/ merged_df['t-13 price']) - 1 ) mom_list = [] for month in mom_months:", ") # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num',", "and then add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month in mom_months:", "b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom", "merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition]", "market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'],", "mom_months: # Find t-13 prices for ticker in range(1, 76): t_13 = months[months.index(month)", "'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q,", "( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl", "dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a NaN column,", "== 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[", "merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return", "merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio", "cap # Concat all 75 tickers' data me_list = [] for file_number in", "for ticker in range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date']", "all months months = index_df['date'].str[:6].unique().tolist() # The list of months that we need", "'139806', '139807', '139808', '139809', '139810', '139811', '139812' ] # The list of months", "me_months = [ '139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409',", "price', np.nan) for month in mom_months: # Find t-13 prices for ticker in", "= ( ((sh_return + bh_return) / 2) - ((sl_return + bl_return) / 2)", "q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q,", "'139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703',", "<= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for calculating returns close_list", "pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio']", "condition and t-1 market cap condition previous_month = months[months.index(month) - 1] me_condition =", "months return for month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13", "bh_condition] bl_portfolio = 
merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio =", "'139212') & (df['date'] <= '139900')] # Read index df for indicating open market", "'139708', '139709', '139710', '139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808',", "# Read Bourseview data for market cap # Concat all 75 tickers' data", "str}, parse_dates=[0] ) # Solve index reading problem, pandas add 2 index to", "each month ME into two groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median()", "'139709', '139710', '139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809',", "First, create a NaN column, and then add t-13 prices merged_df.insert(5, 't-13 price',", "portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl =", "merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios", "index reading problem, pandas add 2 index to the df df.reset_index(drop=True, inplace=True) #", "names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list of all months months =", "range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df =", "need for camculating MOM mom_months = me_months[1:] # Merge market cap and price", "[ '139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411',", "[] for file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data -", "'139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802',", "Merge market cap and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) #", "month in mom_months: # Find t-13 prices for ticker in range(1, 76): t_13", "كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The", "usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index reading", "mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each month ME", "column, and then add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month in", "== 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) #", "MOM group q = [0, .3, .7, 1] labels = ['L', 'M', 'H']", "weights=sl_portfolio.market_cap ) # Calculate MOM, and add it to a list mom =", "= df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') &", "None # Read Bourseview data for market cap # Concat all 75 tickers'", "sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each", "data me_list = [] for file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New", ">= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data", "== 'SH' ]['ticker_num'].tolist() sl = 
portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted", "return for each portfolio in month t # Set conditions month_condition = (merged_df['date']", "'139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812'", "me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1,", "Check t-13 price condition and t-1 market cap condition previous_month = months[months.index(month) -", "jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data for market cap # Concat", "inplace=True) # Read rahavard 365 data for calculating returns close_list = [] for", "in range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13)", "# Check t-13 price condition and t-1 market cap condition previous_month = months[months.index(month)", "]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] ==", "'139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501', '139502', '139503',", "Concat all 75 tickers' data me_list = [] for file_number in range(1, 76):", "open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1],", "(merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion", "'139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602',", "price' ] = t_13_price except: pass # Calculate last 12 months return for", "labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom", "numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] ==", "portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() #", "# Set conditions month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition =", "to a list mom = ( ((sh_return + bh_return) / 2) - ((sl_return", "market cap # Concat all 75 tickers' data me_list = [] for file_number", "== previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price except: pass", "pandas as pd import numpy as np import jdatetime pd.options.mode.chained_assignment = None #", "me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' )", "'139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612',", "f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-'", "file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date']", "me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list,", 
"'139705', '139706', '139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802', '139803', '139804', '139805',", "np.nan) for month in mom_months: # Find t-13 prices for ticker in range(1,", "= portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist()", "value-weighted return for each portfolio in month t # Set conditions month_condition =", "bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate", "= np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and add it to a", "range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df =", "weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap", "merged_df.insert(5, 't-13 price', np.nan) for month in mom_months: # Find t-13 prices for", "'139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603',", "'t-13 price', np.nan) for month in mom_months: # Find t-13 prices for ticker", "= merged_df.loc[me_condition & mom_condition] # Split each month ME into two groups conditions", "labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] = (", "the df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date'] = df['date'].apply( lambda", "indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path,", "pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True)", "'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict()", "ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) -", "df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df =", "'139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501', '139502',", "'139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802', '139803',", "for market cap # Concat all 75 tickers' data me_list = [] for", "'139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710', '139711',", "t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion", "df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df)", "= pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) #", "]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, 
labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return']", "sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL'", "Set conditions month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl)", "previous_month = months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna())", "t-13 prices for ticker in range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion", "x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom']", "price']) - 1 ) mom_list = [] for month in mom_months: # Check", "sl_condition] # Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return =", "of months that we need for calculating market cap me_months = [ '139312',", "'t-13 price' ] = t_13_price except: pass # Calculate last 12 months return", "and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a", "r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True)", "a list mom = ( ((sh_return + bh_return) / 2) - ((sl_return +", "mom_list = [] for month in mom_months: # Check t-13 price condition and", "bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) #", "df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df =", "bh_return) / 2) - ((sl_return + bl_return) / 2) ) mom_list.append(mom) mom_df =", "<= '139900')] # Read index df for indicating open market days index_path =", "numpy as np import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data for", "= me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard", "mom_months: # Check t-13 price condition and t-1 market cap condition previous_month =", "'139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707',", "] # The list of months that we need for camculating MOM mom_months", "dtype={'date': str} ) index_df.dropna(inplace=True) # The list of all months months = index_df['date'].str[:6].unique().tolist()", "in month t # Set conditions month_condition = (merged_df['date'] == month) bh_condition =", "= (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month", "df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] )", "we need for calculating market cap me_months = [ '139312', '139401', '139402', '139403',", "merged_df['t-13 price']) - 1 ) mom_list = [] for month in mom_months: #", "Calculate last 12 months return for month t (t-1, t-12) merged_df['past_year_return'] = (", ">= '139212') & (df['date'] <= '139900')] # Read index df for indicating open", "portfo_const_df['market_cap'].median() ) ] 
portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split", "'139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710',", "weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and add", "x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index()", ".3, .7, 1] labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] ==", "'139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605',", "f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0]", "'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect", "np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio into 3", "portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH'", "calculating returns close_list = [] for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New", "me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365", "index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list", "(merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price except:", "= merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition", "index to the df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date'] =", "cap and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create", "me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df =", "= me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create", "t # Set conditions month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition", ") bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap )", "# Read rahavard 365 data for calculating returns close_list = [] for file_number", "dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df =", "== previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each", "= ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist()", "pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 
'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve", "76): t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition =", "pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a NaN column, and then add", "in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df", "me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >=", "365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str},", "x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s", "merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition]", "list of months that we need for calculating market cap me_months = [", "1] labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return']", "= me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition),", "months that we need for camculating MOM mom_months = me_months[1:] # Merge market", "[0, .3, .7, 1] labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size']", "'139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710', '139711', '139712', '139801',", "it to a list mom = ( ((sh_return + bh_return) / 2) -", "then add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month in mom_months: #", "portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio']", "create a NaN column, and then add t-13 prices merged_df.insert(5, 't-13 price', np.nan)", "portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio =", "usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change order from old", "'139804', '139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812' ] # The list", "np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and add it to a list", "df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] # Read index df for indicating", "me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) #", "ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) #", "['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b,", "1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ]", "'139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704',", "dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df", "portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, 
q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom))", "df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')]", "= merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition]", "= me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True)", "'139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706',", "] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me", "'139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812' ]", "add it to a list mom = ( ((sh_return + bh_return) / 2)", "13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price", "Calculate MOM, and add it to a list mom = ( ((sh_return +", "on=['ticker_num', 'date']) # First, create a NaN column, and then add t-13 prices", "# The list of months that we need for camculating MOM mom_months =", "= (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[", "portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh", "problem, pandas add 2 index to the df df.reset_index(drop=True, inplace=True) # Convert to", "that we need for camculating MOM mom_months = me_months[1:] # Merge market cap", "11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change order from old to new", "df = df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] # Read index df", "of months that we need for camculating MOM mom_months = me_months[1:] # Merge", "bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio", "'139706', '139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806',", "merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition &", "Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date',", "'139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702',", "each me portfolio into 3 MOM group q = [0, .3, .7, 1]", "in mom_months: # Check t-13 price condition and t-1 market cap condition previous_month", "> portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B',", "= f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2,", "'139408', '139409', '139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508',", "pass # Calculate last 12 months return for month t (t-1, t-12) merged_df['past_year_return']", "two 
groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <=", "= df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df", "import numpy as np import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data", "'') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe", "as np import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data for market", "market cap condition previous_month = months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month)", "'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S'", "in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df", "dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change()", "header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index reading problem, pandas add 2", "# Concat all 75 tickers' data me_list = [] for file_number in range(1,", "= months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] ==", "to shamsi dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly", "The list of months that we need for camculating MOM mom_months = me_months[1:]", "Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return,", ") sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap )", "]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio in month t # Set", "np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM,", "= np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return =", "# The list of months that we need for calculating market cap me_months", "price condition and t-1 market cap condition previous_month = months[months.index(month) - 1] me_condition", "list mom = ( ((sh_return + bh_return) / 2) - ((sl_return + bl_return)", "Solve index reading problem, pandas add 2 index to the df df.reset_index(drop=True, inplace=True)", "t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) - 1 )", "'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index reading problem, pandas add", "(merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each month ME into two", "[] for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard", "old to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete", "# Change order from old 
to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] =", "tickers' data me_list = [] for file_number in range(1, 76): print(file_number) me_path =", "- 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price'", "# Find t-13 prices for ticker in range(1, 76): t_13 = months[months.index(month) -", "Read Bourseview data for market cap # Concat all 75 tickers' data me_list", "me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True)", "usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list of all months months", "we need for camculating MOM mom_months = me_months[1:] # Merge market cap and", "list of months that we need for camculating MOM mom_months = me_months[1:] #", "previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price except: pass #", "me portfolio into 3 MOM group q = [0, .3, .7, 1] labels", "= df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] # Read index df for", "me_months[1:] # Merge market cap and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num',", "'139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501', '139502', '139503', '139504',", "file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >=", "= df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last()", "portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each month ME into two groups", "try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1]", "= me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date']", "'139808', '139809', '139810', '139811', '139812' ] # The list of months that we", "'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio']", "returns close_list = [] for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily", "prices for ticker in range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion =", "), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6,", "weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap", "The list of all months months = index_df['date'].str[:6].unique().tolist() # The list of months", "mom_months = me_months[1:] # Merge market cap and price dfs merged_df = pd.merge(df,", "# Convert to shamsi dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) #", "& bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] #", "= (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) 
portfo_const_df = merged_df.loc[me_condition & mom_condition] #", ") bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] ==", "prices merged_df.insert(5, 't-13 price', np.nan) for month in mom_months: # Find t-13 prices", "'139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511',", "group q = [0, .3, .7, 1] labels = ['L', 'M', 'H'] x_b", "months months = index_df['date'].str[:6].unique().tolist() # The list of months that we need for", "= pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list of", "# Calculate last 12 months return for month t (t-1, t-12) merged_df['past_year_return'] =", "days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date':", "'139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512',", "t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num']", "previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each month", "index df for indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df", "3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change order from old to", "= merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion =", "# Read index df for indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص", "df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list,", "for month in mom_months: # Check t-13 price condition and t-1 market cap", ") # Solve index reading problem, pandas add 2 index to the df", "inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index()", ".7, 1] labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[ portfo_const_df['size'] == 'B'", "portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl", "import pandas as pd import numpy as np import jdatetime pd.options.mode.chained_assignment = None", "ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date'] <= '139900')] # Read index", "np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average(", "inplace=True) # Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num',", "= df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True)", "as pd import numpy as np import jdatetime pd.options.mode.chained_assignment = None # Read", "'SH' ]['ticker_num'].tolist() sl = 
portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return", "month ME into two groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ),", "# First, create a NaN column, and then add t-13 prices merged_df.insert(5, 't-13", "# Split each me portfolio into 3 MOM group q = [0, .3,", "of all months months = index_df['date'].str[:6].unique().tolist() # The list of months that we", "['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio into 3 MOM", "f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3,", "me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly", "that we need for calculating market cap me_months = [ '139312', '139401', '139402',", "t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list =", "for each portfolio in month t # Set conditions month_condition = (merged_df['date'] ==", "value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap", "market cap and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # First,", "merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition", "= months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df", "sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and add it to a list mom", "Calculating value-weighted return for each portfolio in month t # Set conditions month_condition", "'139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709',", "76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv(", "portfolio_size) # Split each me portfolio into 3 MOM group q = [0,", "market cap me_months = [ '139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407',", "parse_dates=[0] ) # Solve index reading problem, pandas add 2 index to the", "and t-1 market cap condition previous_month = months[months.index(month) - 1] me_condition = (merged_df['date']", "portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating", "== 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio in month t", "+ bh_return) / 2) - ((sl_return + bl_return) / 2) ) mom_list.append(mom) mom_df", "portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio in", "pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom']", "f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7],", "& ticker_condition ]['close'].values[0] previous_month = 
me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month)", "+ portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[", "merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list = []", "portfo_const_df.loc[ portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[", "month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) - 1", "last 12 months return for month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close']", "Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df)", "# Calculate MOM, and add it to a list mom = ( ((sh_return", "mom = ( ((sh_return + bh_return) / 2) - ((sl_return + bl_return) /", "portfo_const_df['size'] + portfo_const_df['mom'] ) bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl =", "'139811', '139812' ] # The list of months that we need for camculating", "into two groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap']", "close_list = [] for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data", "t-13 price condition and t-1 market cap condition previous_month = months[months.index(month) - 1]", "= portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist()", "q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] =", "= portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist()", "& ticker_condition), 't-13 price' ] = t_13_price except: pass # Calculate last 12", "merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price except: pass # Calculate", "merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date']", "t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13 price' ] =", "'139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609', '139610', '139611',", "Data - Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'],", "(merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list = [] for month in", "Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date':", "'139612', '139701', '139702', '139703', '139704', '139705', '139706', '139707', '139708', '139709', '139710', '139711', '139712',", "except: pass # Calculate last 12 months return for month t (t-1, t-12)", "portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom)", "df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = 
pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212')", "np import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data for market cap", "# Solve index reading problem, pandas add 2 index to the df df.reset_index(drop=True,", "'139803', '139804', '139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812' ] # The", "= t_13_price except: pass # Calculate last 12 months return for month t", "str} ) index_df.dropna(inplace=True) # The list of all months months = index_df['date'].str[:6].unique().tolist() #", "= None # Read Bourseview data for market cap # Concat all 75", "= portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] =", "bh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL'", "= merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns", "= pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True,", "] = t_13_price except: pass # Calculate last 12 months return for month", "[] for month in mom_months: # Check t-13 price condition and t-1 market", "365 data for calculating returns close_list = [] for file_number in range(1, 76):", "camculating MOM mom_months = me_months[1:] # Merge market cap and price dfs merged_df", "from old to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') #", "- 1 ) mom_list = [] for month in mom_months: # Check t-13", "= pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] ) #", "'139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501',", "month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition =", "'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio in month t #", "'139710', '139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809', '139810',", "= merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition &", "Split each month ME into two groups conditions = [ ( portfo_const_df['market_cap'] >", "& mom_condition] # Split each month ME into two groups conditions = [", "portfolio into 3 MOM group q = [0, .3, .7, 1] labels =", "rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index", "mom_condition] # Split each month ME into two groups conditions = [ (", "<= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) #", "((sh_return + bh_return) / 2) - ((sl_return + bl_return) / 2) ) mom_list.append(mom)", "- 1] me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition", "1] me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition &", 
"me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212')", "months = index_df['date'].str[:6].unique().tolist() # The list of months that we need for calculating", "= (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition] # Split each month ME into", "for month in mom_months: # Find t-13 prices for ticker in range(1, 76):", "'date']) # First, create a NaN column, and then add t-13 prices merged_df.insert(5,", ") # Change order from old to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date']", "calculating market cap me_months = [ '139312', '139401', '139402', '139403', '139404', '139405', '139406',", "The list of months that we need for calculating market cap me_months =", "rahavard 365 data for calculating returns close_list = [] for file_number in range(1,", "- Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open',", "7], names=['date', 'close'], header=0, dtype={'date': str}, parse_dates=[0] ) # Solve index reading problem,", "Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio", "= [0, .3, .7, 1] labels = ['L', 'M', 'H'] x_b = portfo_const_df.loc[", "Find t-13 prices for ticker in range(1, 76): t_13 = months[months.index(month) - 13]", "( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size = np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size',", "import jdatetime pd.options.mode.chained_assignment = None # Read Bourseview data for market cap #", "sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return,", "months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df =", "me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <=", "for indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel(", "'market_cap'], na_values='-' ) # Change order from old to new dates me_df =", "portfolio in month t # Set conditions month_condition = (merged_df['date'] == month) bh_condition", "index_df.dropna(inplace=True) # The list of all months months = index_df['date'].str[:6].unique().tolist() # The list", "'139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for", "condition previous_month = months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month) mom_condition =", "to the df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date'] = df['date'].apply(", "sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and", "s_mom = pd.qcut(x=x_s, q=q, labels=labels).to_dict() portfo_const_df['mom'] = pd.Series(b_mom) portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker", "print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path,", "sl_return = np.average( sl_portfolio.monthly_return, 
weights=sl_portfolio.market_cap ) # Calculate MOM, and add it to", "= me_months[1:] # Merge market cap and price dfs merged_df = pd.merge(df, me_df,", "( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ] portfolio_size =", "'139605', '139606', '139607', '139608', '139609', '139610', '139611', '139612', '139701', '139702', '139703', '139704', '139705',", "'139507', '139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607',", "# Merge market cap and price dfs merged_df = pd.merge(df, me_df, on=['ticker_num', 'date'])", "- 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try:", "'139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510', '139511', '139512', '139601',", "close_list.append(df) df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date'] <=", "( (merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list = [] for month", "# Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df", "conditions month_condition = (merged_df['date'] == month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition", "= r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls' index_df = pd.read_excel( index_path, usecols=[1], names=['date'], dtype={'date': str} )", "bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return = np.average( sl_portfolio.monthly_return,", "2 index to the df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date']", "sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted", "portfo_const_df['mom'].update(pd.Series(s_mom)) # Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] )", "reading problem, pandas add 2 index to the df df.reset_index(drop=True, inplace=True) # Convert", "( ((sh_return + bh_return) / 2) - ((sl_return + bl_return) / 2) )", "pd.options.mode.chained_assignment = None # Read Bourseview data for market cap # Concat all", "inplace=True) # Convert to shamsi dates df['date'] = df['date'].apply( lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') )", "each portfolio in month t # Set conditions month_condition = (merged_df['date'] == month)", "order from old to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '')", "'139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507', '139508', '139509', '139510',", "index_path, usecols=[1], names=['date'], dtype={'date': str} ) index_df.dropna(inplace=True) # The list of all months", "new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days", "NaN column, and then add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month", "range(1, 76): t_13 = months[months.index(month) - 13] t_13_condtion = (merged_df['date'] == t_13) ticker_condition", "(t_1_condtion & ticker_condition), 't-13 price' ] = t_13_price except: pass # Calculate last", "Bourseview 
data for market cap # Concat all 75 tickers' data me_list =", "groups conditions = [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median()", "bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition", "= [ ( portfo_const_df['market_cap'] > portfo_const_df['market_cap'].median() ), ( portfo_const_df['market_cap'] <= portfo_const_df['market_cap'].median() ) ]", "bl_portfolio = merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition", "/ 2) - ((sl_return + bl_return) / 2) ) mom_list.append(mom) mom_df = pd.Series(mom_list).to_excel('mom.xlsx')", "Read rahavard 365 data for calculating returns close_list = [] for file_number in", "= np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return =", "# Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average(", "me_condition = (merged_df['date'] == previous_month) mom_condition = (merged_df['past_year_return'].notna()) portfo_const_df = merged_df.loc[me_condition & mom_condition]", "'139812' ] # The list of months that we need for camculating MOM", "me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion & ticker_condition), 't-13", "'139704', '139705', '139706', '139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802', '139803', '139804',", "bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return,", "me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7,", "merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap )", "== t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion &", "# Extrect portfolio ticker numbers portfo_const_df['portfolio'] = ( portfo_const_df['size'] + portfo_const_df['mom'] ) bh", "sh_condition] sl_portfolio = merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return = np.average(", "= pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] == 'S' ]['past_year_return'] s_mom =", "a NaN column, and then add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for", "pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change", "# Construct portfolios bh_portfolio = merged_df.loc[month_condition & bh_condition] bl_portfolio = merged_df.loc[month_condition & bl_condition]", "df df.reset_index(drop=True, inplace=True) # Convert to shamsi dates df['date'] = df['date'].apply( lambda x:", "bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH'", "'139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809', 
'139810', '139811',", "ticker_condition), 't-13 price' ] = t_13_price except: pass # Calculate last 12 months", "== 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size'] ==", "= merged_df.loc[month_condition & bl_condition] sh_portfolio = merged_df.loc[month_condition & sh_condition] sl_portfolio = merged_df.loc[month_condition &", "for file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\", "12 months return for month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] /", "me_df.reset_index(drop=True, inplace=True) # Read rahavard 365 data for calculating returns close_list = []", "# The list of all months months = index_df['date'].str[:6].unique().tolist() # The list of", "Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'],", "= np.select(conditions, ['B', 'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio into", "file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\ f'{file_number}.txt'", "Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return']", "monthly dataframe df = df.groupby(df['date'].str[:6]).last() df = df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] =", "'139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412',", "'139802', '139803', '139804', '139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812' ] #", "merged_df = pd.merge(df, me_df, on=['ticker_num', 'date']) # First, create a NaN column, and", "me_path, skiprows=7, usecols=[2, 3, 11], names=['date', 'open', 'market_cap'], na_values='-' ) # Change order", "ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0]", "(merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition ]['close'].values[0] previous_month =", "portfo_const_df['size'] == 'B' ]['past_year_return'] b_mom = pd.qcut(x=x_b, q=q, labels=labels).to_dict() x_s = portfo_const_df.loc[ portfo_const_df['size']", "'139508', '139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608',", "merged_df.loc[me_condition & mom_condition] # Split each month ME into two groups conditions =", "= me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open',", "add t-13 prices merged_df.insert(5, 't-13 price', np.nan) for month in mom_months: # Find", "df.drop(columns=['date']).reset_index() df.insert(1, 'ticker_num', file_number) df['monthly_return'] = df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df", "df['close'].pct_change() close_list.append(df) df = pd.concat(close_list, ignore_index=True) df = df.loc[(df['date'] >= '139212') & (df['date']", "'139509', '139510', '139511', '139512', '139601', '139602', '139603', '139604', '139605', '139606', '139607', '139608', '139609',", "& sl_condition] # Calculate value-weighted returns bh_return = np.average( 
bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return", "'139810', '139811', '139812' ] # The list of months that we need for", "month) bh_condition = merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl)", "= merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio =", "= portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SL' ]['ticker_num'].tolist() # Calculating value-weighted return for each portfolio", ") mom_list = [] for month in mom_months: # Check t-13 price condition", "= f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2,", "# Calculating value-weighted return for each portfolio in month t # Set conditions", "for camculating MOM mom_months = me_months[1:] # Merge market cap and price dfs", ") # Calculate MOM, and add it to a list mom = (", "]['ticker_num'].tolist() sh = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'SH' ]['ticker_num'].tolist() sl = portfo_const_df.loc[ portfo_const_df['portfolio'] ==", "== 'BH' ]['ticker_num'].tolist() bl = portfo_const_df.loc[ portfo_const_df['portfolio'] == 'BL' ]['ticker_num'].tolist() sh = portfo_const_df.loc[", "names=['date', 'open', 'market_cap'], na_values='-' ) # Change order from old to new dates", "me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df =", "= [] for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data -", "me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number) me_list.append(me_df) me_df = pd.concat(me_list, ignore_index=True) me_df", "t_13_condtion & ticker_condition ]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] ==", "'139707', '139708', '139709', '139710', '139711', '139712', '139801', '139802', '139803', '139804', '139805', '139806', '139807',", "data for market cap # Concat all 75 tickers' data me_list = []", "sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition &", "'S']).tolist() portfo_const_df.insert(6, 'size', portfolio_size) # Split each me portfolio into 3 MOM group", "np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average(", "'139405', '139406', '139407', '139408', '139409', '139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505',", "Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel( me_path, skiprows=7, usecols=[2, 3, 11], names=['date',", "- Rahavard 365/'\\ f'{file_number}.txt' df = pd.read_csv( rahavard_path, usecols=[2, 7], names=['date', 'close'], header=0,", "pandas add 2 index to the df df.reset_index(drop=True, inplace=True) # Convert to shamsi", "MOM, and add it to a list mom = ( ((sh_return + bh_return)", "= merged_df.loc[month_condition & sl_condition] # Calculate value-weighted returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap", "'139407', '139408', '139409', 
'139410', '139411', '139412', '139501', '139502', '139503', '139504', '139505', '139506', '139507',", "Change order from old to new dates me_df = me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-',", "me_df = me_df.loc[(me_df['date'] >= '139212') & (me_df['date'] <= '139900')] me_df.reset_index(drop=True, inplace=True) # Read", "= index_df['date'].str[:6].unique().tolist() # The list of months that we need for calculating market", "lambda x: jdatetime.date.fromgregorian(date=x).strftime('%Y%m%d') ) # Create monthly dataframe df = df.groupby(df['date'].str[:6]).last() df =", "me_df[::-1].reset_index(drop=True) me_df['date'] = me_df['date'].str.replace('-', '') # Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True)", "(t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list", "t-1 market cap condition previous_month = months[months.index(month) - 1] me_condition = (merged_df['date'] ==", "= [] for file_number in range(1, 76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data", "Read index df for indicating open market days index_path = r'E:\\Thesis\\New Sampling\\TEDPIX\\شاخص كل6.xls'", "for month t (t-1, t-12) merged_df['past_year_return'] = ( (merged_df['close'] / merged_df['t-13 price']) -", "= ( (merged_df['close'] / merged_df['t-13 price']) - 1 ) mom_list = [] for", "bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap ) sh_return = np.average( sh_portfolio.monthly_return, weights=sh_portfolio.market_cap ) sl_return", "months that we need for calculating market cap me_months = [ '139312', '139401',", "cap me_months = [ '139312', '139401', '139402', '139403', '139404', '139405', '139406', '139407', '139408',", "returns bh_return = np.average( bh_portfolio.monthly_return, weights=bh_portfolio.market_cap ) bl_return = np.average( bl_portfolio.monthly_return, weights=bl_portfolio.market_cap )", "76): print(file_number) me_path = f'E:/Thesis/New Sampling/Daily Data - Bourseview/'\\ f'{file_number}.xlsx' me_df = pd.read_excel(", "'139805', '139806', '139807', '139808', '139809', '139810', '139811', '139812' ] # The list of", "for file_number in range(1, 76): rahavard_path = f'E:/Thesis/New Sampling/Daily Data - Rahavard 365/'\\", "Delete non-traded days me_df.dropna(subset=['open'], inplace=True) me_df.drop(columns='open', inplace=True) # Create monthly dataframe me_df =", "t_13) ticker_condition = (merged_df['ticker_num'] == ticker) try: t_13_price = merged_df.loc[ t_13_condtion & ticker_condition", "# Split each month ME into two groups conditions = [ ( portfo_const_df['market_cap']", "]['close'].values[0] previous_month = me_months[me_months.index(month) - 1] t_1_condtion = (merged_df['date'] == previous_month) merged_df.loc[ (t_1_condtion", "for calculating market cap me_months = [ '139312', '139401', '139402', '139403', '139404', '139405',", "t_13_price except: pass # Calculate last 12 months return for month t (t-1,", "# Create monthly dataframe me_df = me_df.groupby(me_df['date'].str[:6]).last() me_df = me_df.drop(columns=['date']).reset_index() me_df.insert(1, 'ticker_num', file_number)", "& (df['date'] <= '139900')] # Read index df for indicating open market days", ") sl_return = np.average( sl_portfolio.monthly_return, weights=sl_portfolio.market_cap ) # Calculate MOM, and add it", "sl_condition = merged_df['ticker_num'].isin(sl) # Construct portfolios bh_portfolio = merged_df.loc[month_condition & 
bh_condition] bl_portfolio =", "= merged_df['ticker_num'].isin(bh) bl_condition = merged_df['ticker_num'].isin(bl) sh_condition = merged_df['ticker_num'].isin(sh) sl_condition = merged_df['ticker_num'].isin(sl) # Construct", "cap condition previous_month = months[months.index(month) - 1] me_condition = (merged_df['date'] == previous_month) mom_condition" ]
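The monthly loop above is a 2x3 double sort: stocks are split into big (B) and small (S) at the median market cap of month t-1, then into loser (L), neutral (M) and winner (H) terciles of past-year return at the 30%/70% breakpoints, and MOM is the value-weighted spread ((SH + BH) / 2) - ((SL + BL) / 2). Below is a minimal, self-contained sketch of that same construction on a one-month toy cross-section; the tickers, market caps and returns are invented purely for illustration and are not part of the thesis data.

import numpy as np
import pandas as pd

# One-month toy cross-section; every number here is made up for illustration.
toy = pd.DataFrame({
    'ticker_num':       [1, 2, 3, 4, 5, 6],
    'market_cap':       [900, 800, 700, 100, 90, 80],
    'past_year_return': [0.45, 0.02, -0.30, 0.60, 0.01, -0.20],
    'monthly_return':   [0.05, 0.01, -0.02, 0.08, 0.00, -0.04],
})

# Size split at the median market cap: B = big, S = small.
toy['size'] = np.where(toy['market_cap'] > toy['market_cap'].median(), 'B', 'S')

# 30/70 momentum breakpoints within each size group, as in the script above.
toy['mom'] = (
    toy.groupby('size')['past_year_return']
       .transform(lambda x: pd.qcut(x, [0, .3, .7, 1], labels=['L', 'M', 'H']))
)
toy['portfolio'] = toy['size'] + toy['mom'].astype(str)

# Value-weighted monthly return of each portfolio, then the MOM spread.
vw = toy.groupby('portfolio').apply(
    lambda g: np.average(g['monthly_return'], weights=g['market_cap'])
)
mom = (vw['SH'] + vw['BH']) / 2 - (vw['SL'] + vw['BL']) / 2
print(round(mom, 4))   # 0.095 for this toy data

With 30/70 breakpoints the middle (M) portfolios are deliberately excluded from the factor, which is why only the four corner portfolios (SH, BH, SL, BL) enter the spread.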
# aws-samples/siem-on-amazon-opensearch-service: source/lambda/geoip_downloader/index.py
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
# SPDX-License-Identifier: MIT-0
__copyright__ = ('Copyright Amazon.com, Inc. or its affiliates. '
                 'All Rights Reserved.')
__version__ = '2.7.1'
__license__ = 'MIT-0'
__author__ = '<NAME>'
__url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service'

import hashlib
import json
import os
import tarfile
import urllib.error
import urllib.parse
import urllib.request

import boto3

# Get vars from the Lambda environment
try:
    s3bucket_name = os.environ['s3bucket_name']
    license_key = os.environ['license_key']
except KeyError:
    raise Exception('ERROR: impossible to get lambda environment')
s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/')

s3 = boto3.resource('s3')
bucket = s3.Bucket(s3bucket_name)
url = 'https://download.maxmind.com/app/geoip_download?'
put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country']


def download_file(filename):
    # Download the database archive and its .sha256 file from MaxMind
    for suffix in ['tar.gz', 'tar.gz.sha256']:
        values = {'edition_id': filename, 'license_key': license_key,
                  'suffix': suffix}
        data = urllib.parse.urlencode(values)
        try:
            urllib.request.urlretrieve(
                url + data, filename='/tmp/' + filename + '.' + suffix)
        except urllib.error.HTTPError as err:
            if err.status == 401:
                return err.status
            print(err)
            raise Exception('ERROR: http error')
        except Exception as err:
            print(err)
            raise Exception('ERROR: ' + str(err))
    print('INFO: ' + filename + ' was downloaded')
    return 200


def put_to_s3(filename):
    # Verify the checksum, extract the .mmdb and upload it to S3
    with open('/tmp/' + filename + '.tar.gz.sha256') as f:
        checksum = f.read().split()[0]
        print('INFO: Checksum: ' + checksum)
    with open('/tmp/' + filename + '.tar.gz', 'rb') as f:
        calculated_checksum = hashlib.sha256(f.read()).hexdigest()
    if checksum not in calculated_checksum:
        print('ERROR: checksum is different. download failed')
        return False
    with tarfile.open('/tmp/' + filename + '.tar.gz', 'r:gz') as tf:
        directory = tf.getmembers()[0].name
        tf.extractall(path='/tmp/')
        mmdb = directory + '/' + filename + '.mmdb'
        s3obj = s3key_prefix + filename + '.mmdb'
        bucket.upload_file('/tmp/' + mmdb, s3obj)
        print('INFO: uploaded {0} to s3://{1}/{2}'.format(
            mmdb, s3bucket_name, s3obj))


def send(event, context, responseStatus, responseData,
         physicalResourceId=None, noEcho=False):
    # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html
    responseUrl = event['ResponseURL']
    print(responseUrl)
    response_body = {}
    response_body['Status'] = responseStatus
    response_body['Reason'] = ('See the details in CloudWatch Log Stream: '
                               + context.log_stream_name)
    response_body['PhysicalResourceId'] = (
        physicalResourceId or context.log_stream_name)
    response_body['StackId'] = event['StackId']
    response_body['RequestId'] = event['RequestId']
    response_body['LogicalResourceId'] = event['LogicalResourceId']
    response_body['NoEcho'] = noEcho
    response_body['Data'] = responseData
    json_response_body = json.dumps(response_body)
    print('Response body:\n' + json_response_body)
    headers = {'content-type': 'application/json'}
    req = urllib.request.Request(
        event['ResponseURL'], json_response_body.encode(),
        headers=headers, method='PUT')
    try:
        res = urllib.request.urlopen(req)
        print('Status code: ' + str(res.status))
    except Exception as e:
        print('send(..) failed executing requests.put(..): ' + str(e))


def lambda_handler(event, context):
    physicalResourceId = 'geoipdb'
    status = 'None'
    if event:
        print(json.dumps(event))
    try:
        for filename in put_files:
            status = download_file(filename)
            if status == 401:
                break
            put_to_s3(filename)
    except Exception as e:
        print(e)
        if event and 'RequestType' in event:
            response = {'failed_reason': str(e)}
            send(event, context, 'FAILED', response, physicalResourceId)
    if event and 'RequestType' in event:
        if status == 401:
            response = {'status': 'invalide_license_key'}
        else:
            response = {'status': 'downloaded'}
        send(event, context, 'SUCCESS', response, physicalResourceId)
        return json.dumps(response)
# SPDX-License-Identifier: MIT-0 __copyright__ = ('Copyright Amazon.com, Inc.", "= tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory + '/' + filename + '.mmdb' s3obj", "and 'RequestType' in event: if status == 401: response = {'status': 'invalide_license_key'} else:", "'/' + filename + '.mmdb' s3obj = s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/'", "break put_to_s3(filename) except Exception as e: print(e) if event and 'RequestType' in event:", "'rb') as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if checksum not in calcurated_checksum: print('ERROR: checksum", "noEcho response_body['Data'] = responseData json_response_body = json.dumps(response_body) print('Response body:\\n' + json_response_body) headers =", "filename + '.tar.gz', 'r:gz') as tf: directory = tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory", "s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO: uploaded {0} to", "tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory + '/' + filename + '.mmdb' s3obj =", "open('/tmp/' + filename + '.tar.gz', 'rb') as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest() if checksum", "s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/') s3 = boto3.resource('s3') bucket = s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?'", "'.tar.gz', 'r:gz') as tf: directory = tf.getmembers()[0].name tf.extractall(path='/tmp/') mmdb = directory + '/'", "' was downloaded') return 200 def put_to_s3(filename): with open('/tmp/' + filename + '.tar.gz.sha256')", "= event['ResponseURL'] print(responseUrl) response_body = {} response_body['Status'] = responseStatus response_body['Reason'] = ('See the", "Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: MIT-0 __copyright__ =", "and 'RequestType' in event: response = {'failed_reason': e} send(event, context, 'FAILED', response, physicalResourceId)", "('Copyright Amazon.com, Inc. or its affiliates. ' 'All Rights Reserved.') __version__ = '2.7.1'", "os.environ['license_key'] except KeyError: raise Exception('ERROR: impossible to get lambda environment') s3key_prefix = os.environ.get('s3key_prefix',", "hashlib.sha256(f.read()).hexdigest() if checksum not in calcurated_checksum: print('ERROR: checksum is different. download is failed')", "= {'failed_reason': e} send(event, context, 'FAILED', response, physicalResourceId) if event and 'RequestType' in", "s3obj) print('INFO: uploaded {0} to s3://{1}/{2}'.format( mmdb, s3bucket_name, s3obj)) def send(event, context, responseStatus,", "put_to_s3(filename) except Exception as e: print(e) if event and 'RequestType' in event: response", "the details in CloudWatch Log Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId'] = (", "'.mmdb' s3obj = s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO:", "+ '.' 
+ suffix) except urllib.error.HTTPError as err: if err.status == 401: return", "filename in put_files: status = download_file(filename) if status == 401: break put_to_s3(filename) except", "Rights Reserved.') __version__ = '2.7.1' __license__ = 'MIT-0' __author__ = '<NAME>' __url__ =", "environment try: s3bucket_name = os.environ['s3bucket_name'] license_key = os.environ['license_key'] except KeyError: raise Exception('ERROR: impossible", "' '' + context.log_stream_name) response_body['PhysicalResourceId'] = ( physicalResourceId or context.log_stream_name) response_body['StackId'] = event['StackId']", "checksum) with open('/tmp/' + filename + '.tar.gz', 'rb') as f: calcurated_checksum = hashlib.sha256(f.read()).hexdigest()", "status == 401: response = {'status': 'invalide_license_key'} else: response = {'status': 'downloaded'} send(event,", "+ checksum) with open('/tmp/' + filename + '.tar.gz', 'rb') as f: calcurated_checksum =", "responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL'] print(responseUrl) response_body = {} response_body['Status']", "physicalResourceId or context.log_stream_name) response_body['StackId'] = event['StackId'] response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho']", "downloaded') return 200 def put_to_s3(filename): with open('/tmp/' + filename + '.tar.gz.sha256') as f:", "response_body = {} response_body['Status'] = responseStatus response_body['Reason'] = ('See the details in CloudWatch", "= ('See the details in CloudWatch Log Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId']", "= s3.Bucket(s3bucket_name) url = 'https://download.maxmind.com/app/geoip_download?' put_files = ['GeoLite2-City', 'GeoLite2-ASN', 'GeoLite2-Country'] def download_file(filename): for", "('See the details in CloudWatch Log Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId'] =", "+ '.mmdb' s3obj = s3key_prefix + filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj)", "was downloaded') return 200 def put_to_s3(filename): with open('/tmp/' + filename + '.tar.gz.sha256') as", "+ ' was downloaded') return 200 def put_to_s3(filename): with open('/tmp/' + filename +", "lambda environment') s3key_prefix = os.environ.get('s3key_prefix', 'GeoLite2/') s3 = boto3.resource('s3') bucket = s3.Bucket(s3bucket_name) url", "return 200 def put_to_s3(filename): with open('/tmp/' + filename + '.tar.gz.sha256') as f: checksum", "s3bucket_name, s3obj)) def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl =", "headers=headers, method='PUT') try: res = urllib.request.urlopen(req) print('Status code: ' + str(res.status)) except Exception", "try: res = urllib.request.urlopen(req) print('Status code: ' + str(res.status)) except Exception as e:", "as err: print(err) raise Exception('ERROR: ' + err) print('INFO: ' + filename +", "calcurated_checksum: print('ERROR: checksum is different. 
download is failed') return False with tarfile.open('/tmp/' +", "= urllib.request.Request( event['ResponseURL'], json_response_body.encode(), headers=headers, method='PUT') try: res = urllib.request.urlopen(req) print('Status code: '", "event and 'RequestType' in event: if status == 401: response = {'status': 'invalide_license_key'}", "body:\\n' + json_response_body) headers = {'content-type': 'application/json', } req = urllib.request.Request( event['ResponseURL'], json_response_body.encode(),", "err.status == 401: return err.status print(err) raise Exception('ERROR: http error') except Exception as", "error') except Exception as err: print(err) raise Exception('ERROR: ' + err) print('INFO: '", "response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data'] = responseData json_response_body = json.dumps(response_body) print('Response", "event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data'] = responseData json_response_body = json.dumps(response_body) print('Response body:\\n' +", "context, 'FAILED', response, physicalResourceId) if event and 'RequestType' in event: if status ==", "try: urllib.request.urlretrieve( url + data, filename='/tmp/' + filename + '.' + suffix) except", "event: print(json.dumps(event)) try: for filename in put_files: status = download_file(filename) if status ==", "__url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import json import os import tarfile import urllib.error", "details in CloudWatch Log Stream: ' '' + context.log_stream_name) response_body['PhysicalResourceId'] = ( physicalResourceId", "= 'geoipdb' status = 'None' if event: print(json.dumps(event)) try: for filename in put_files:", "'RequestType' in event: response = {'failed_reason': e} send(event, context, 'FAILED', response, physicalResourceId) if", "response_body['Reason'] = ('See the details in CloudWatch Log Stream: ' '' + context.log_stream_name)", "var from lambda environment try: s3bucket_name = os.environ['s3bucket_name'] license_key = os.environ['license_key'] except KeyError:", "event and 'RequestType' in event: response = {'failed_reason': e} send(event, context, 'FAILED', response,", "code: ' + str(res.status)) except Exception as e: print('send(..) 
failed executing requests.put(..): '", "print(e) if event and 'RequestType' in event: response = {'failed_reason': e} send(event, context,", "'FAILED', response, physicalResourceId) if event and 'RequestType' in event: if status == 401:", "+ err) print('INFO: ' + filename + ' was downloaded') return 200 def", "__author__ = '<NAME>' __url__ = 'https://github.com/aws-samples/siem-on-amazon-opensearch-service' import hashlib import json import os import", "json import os import tarfile import urllib.error import urllib.parse import urllib.request import boto3", "Exception('ERROR: http error') except Exception as err: print(err) raise Exception('ERROR: ' + err)", "filename + '.mmdb' bucket.upload_file('/tmp/' + mmdb, s3obj) print('INFO: uploaded {0} to s3://{1}/{2}'.format( mmdb,", "except Exception as e: print(e) if event and 'RequestType' in event: response =", "'GeoLite2-Country'] def download_file(filename): for suffix in ['tar.gz', 'tar.gz.sha256']: values = {'edition_id': filename, 'license_key':", "= event['StackId'] response_body['RequestId'] = event['RequestId'] response_body['LogicalResourceId'] = event['LogicalResourceId'] response_body['NoEcho'] = noEcho response_body['Data'] =", "def send(event, context, responseStatus, responseData, physicalResourceId=None, noEcho=False): # https://docs.aws.amazon.com/ja_jp/AWSCloudFormation/latest/UserGuide/cfn-lambda-function-code-cfnresponsemodule.html responseUrl = event['ResponseURL'] print(responseUrl)", "MIT-0 __copyright__ = ('Copyright Amazon.com, Inc. or its affiliates. ' 'All Rights Reserved.')" ]
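The list above is a set of overlapping n-gram windows over a small AWS Lambda (from aws-samples/siem-on-amazon-opensearch-service) that downloads MaxMind GeoLite2 archives, checks each archive against its published SHA-256 checksum, and uploads the extracted .mmdb database to S3. Because the windows shred the control flow, here is a minimal, self-contained Python sketch of just the verify-then-extract step they quote; it is not the upstream Lambda itself, and the function name verify_and_extract and the /tmp file paths are illustrative assumptions rather than part of the quoted source.

import hashlib
import tarfile

def verify_and_extract(archive='/tmp/GeoLite2-City.tar.gz',
                       checksum_file='/tmp/GeoLite2-City.tar.gz.sha256',
                       dest='/tmp/'):
    # The .sha256 sidecar holds "<hex digest>  <file name>"; keep the digest only.
    with open(checksum_file) as f:
        expected = f.read().split()[0]
    # Hash the downloaded archive and refuse to extract on a mismatch.
    with open(archive, 'rb') as f:
        actual = hashlib.sha256(f.read()).hexdigest()
    if expected != actual:
        print('ERROR: checksum mismatch, refusing to extract')
        return None
    # As in the quoted fragments, the first tar member is the top-level
    # dated directory that contains the .mmdb file.
    with tarfile.open(archive, 'r:gz') as tf:
        top_dir = tf.getmembers()[0].name
        tf.extractall(path=dest)
    return dest + top_dir + '/GeoLite2-City.mmdb'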
[ "in this basis def calculateBasisOffset(self,x1,x2,basis): dx = [(x2[j] - x1[j]) for j in", "Unless required by applicable law or agreed to in writing, software # distributed", "Save the position offset # Calculate a new start time relativeSlaveTime = slaveTime[i]", "export laws and regulations. # The user has the responsibility to obtain export", "= ('Location at which to compute baselines - \"all\" implies '+ 'top, middle,", "return def setSlaveStartingRange(self,range): self.startingRange2 = range return def getHBaselineTop(self): return self.hBaselineTop def getHBaselineRate(self):", "def getRangeOffset(self): return self.rangeOffset def getPhaseConst(self): return self.phaseConst def getLookAngle(self): return self.lookAngle def", "cosl = ((self.height-z)*(2*self.radius + self.height + z) + self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius + self.height) )", "= [] return None def createPorts(self): # Set input ports # It looks", "port in self.inputPorts: port() lookVector = self.calculateLookVector() az_offset = [] vb = []", "the master frame def calculateLookAngle(self): lookVector = self.calculateLookVector() return math.degrees(math.atan2(lookVector[1],lookVector[0])) # Calculate the", "None self.hBaselineTop = None self.hBaselineRate = None self.hBaselineAcc = None self.vBaselineTop = None", "range and azimuth pixel sizes # the two starting ranges, a planet, and", "port() lookVector = self.calculateLookVector() az_offset = [] vb = [] hb = []", "range and azimuth pixel sizes, starting ranges, # satellite heights and times for", "Component, Port from isceobj.Util.mathModule import MathModule as MM from isceobj.Orbit.Orbit import StateVector #", "c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline components between two frames", "[(x2[j] - x1[j]) for j in range(len(x1))] # Calculate the difference between the", "master image, '+ '\"middle\" implies near middle of master image. '+ 'To be", "near middle of master image. '+ 'To be used in case there is", "# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. # # Licensed", "asb[-1])/2.0 az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize)", "midSV.getScalarVelocity()/prf try: ellipsoid = frame._ellipsoid #UAVSAR frame creates ellipsoid with peg self.radius =", "vector c = MM.normalizeVector(c) v = MM.crossProduct(c,r) # Calculate a the \"velocity\" component", "z = 0.0 cosl = ((self.height-z)*(2*self.radius + self.height + z) + self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius", "+= MInv[i][j]*Y[j] return A def setRangePixelSize(self,pixelSize): self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize", "orbit.interpolateOrbit(time, method='hermite') x1 = sv.getPosition() v = sv.getVelocity() r = MM.normalizeVector(x1) # Turn", "vector v = MM.normalizeVector(v) # Turn the velocity vector into a unit vector", "if not (size == 3): print(\"Error. 
Expecting input vectors of length 3.\") raise", "sinl = math.sqrt(1 - cosl*cosl) return [cosl,sinl] # Calculate the scalar spacecraft velocity", "self.slaveFrame = None self.lookAngle = None self.rangePixelSize = None self.azimuthPixelSize = None self.height", "retstr += \"Bulk Azimuth Offset: %s\\n\" retlst += (self.orbSlcAzimuthOffset,) retstr += \"Bulk Range", "def getHBaselineRate(self): return self.hBaselineRate def getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def", "\"velocity\" component that is perpendicular to the cross-track direction and position basis =", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "self.radius = radius return def setMasterStartingRange(self,range): self.startingRange1 = range return def setSlaveStartingRange(self,range): self.startingRange2", "time. for port in self.inputPorts: port() lookVector = self.calculateLookVector() az_offset = [] vb", "Port(name='slaveFrame',method=self.addSlaveFrame) self._inputPorts.add(masterFramePort) self._inputPorts.add(slaveFramePort) return None def __str__(self): retstr = \"Initial Baseline estimates \\n\"", "MM.norm(v) return normV # Given an orbit and a time, calculate an orthogonal", "the gross azimuth and range offsets azb_avg = (az_offset[0] + az_offset[-1])/2.0 asb_avg =", "the responsibility to obtain export licenses, or other export # authority as may", "class to hold three-dimensional basis vectors for spacecraft baselines class BaselineBasis(Basis): def __init__(self):", "middle of master image. '+ 'To be used in case there is a", "= MM.crossProduct(c,r) # Calculate a the \"velocity\" component that is perpendicular to the", "# # United States Government Sponsorship acknowledged. This software is subject to #", "self.height) ) # print('Height: ', self.height) # print('Radius: ', self.radius) # print('Range: ',", "Government Sponsorship acknowledged. This software is subject to # U.S. export control laws", "default = 'all', type=str, mandatory=False, doc = ('Location at which to compute baselines", "'all', type=str, mandatory=False, doc = ('Location at which to compute baselines - \"all\"", "length.\") raise Exception if not (size == 3): print(\"Error. Expecting input vectors of", "= [] asb = [] s = [0.,0.,0.] 
if self.baselineLocation.lower() == 'all': print('Using", "self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() == 'top': print('Estimating baselines at top of", "need two orbits, a time, range and azimuth pixel sizes # the two", "s[i-1] + deltaT*normV masterSV = self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite') slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite') x1 =", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom':", "and vertical baseline components by the look angle vector asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0]) #Calculating", "self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom': print('Estimating baselines at bottom of master", ") # print('Height: ', self.height) # print('Radius: ', self.radius) # print('Range: ', self.startingRange1)", "orbits # These provide the range and azimuth pixel sizes, starting ranges, #", "and a basis, calculate the offset between the two positions in this basis", "Baseline at the start of the scene, mid-scene, and the end of the", "+ self.height) ) # print('Height: ', self.height) # print('Radius: ', self.radius) # print('Range:", "= self.calculateScalarVelocity(self.masterOrbit,masterTime[i]) # Calculate the distance moved since the last baseline point if", "a prohibited end use). By downloading this software, # the user agrees to", "at top of master image') masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() +", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "M[j][i] = math.pow(xRef[j],i) Y[j] = yRef[j] MInv = MM.invertMatrix(M) for i in range(size):", "attributes self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0] self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1] self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop = verticalBaselinePolynomialCoefficients[0]", "= csb[-1] self.orbSlcAzimuthOffset = az_offset self.orbSlcRangeOffset = r_offset self.rangeOffset = self.startingRange1 - self.startingRange2", "= (asb[0] + asb[-1])/2.0 az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) r_offset = (self.startingRange1 -", "self.vBaselineTop = verticalBaselinePolynomialCoefficients[0] self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop = csb[0] self.pBaselineBottom", "self.masterOrbit = None self.slaveOrbit = None self.masterFrame = None self.slaveFrame = None self.lookAngle", "= {} self.mandatoryVariables = [] self.optionalVariables = [] return None def createPorts(self): #", "self.createPorts() # Satisfy the old Component self.dictionaryOfOutputVariables = {} self.dictionaryOfVariables = {} self.descriptionOfVariables", "implies near start of master image, '+ '\"bottom\" implies at bottom of master", "Calculate the gross azimuth and range offsets azb_avg = (az_offset[0] + az_offset[-1])/2.0 asb_avg", "'BASELINE_LOCATION', default = 'all', type=str, 
mandatory=False, doc = ('Location at which to compute", "this is the c, or cross-track vector c = MM.normalizeVector(c) v = MM.crossProduct(c,r)", "and # limitations under the License. # # United States Government Sponsorship acknowledged.", "the master and slave position vectors z_offset = MM.dotProduct(dx,basis.getVelocityVector()) # Calculate the length", "r_offset self.rangeOffset = self.startingRange1 - self.startingRange2 # Calculate a quadratic fit to the", "masterTime = [self.masterFrame.getSensingStart(),self.masterFrame.getSensingMid(),self.masterFrame.getSensingStop()] elif self.baselineLocation.lower() == 'middle': print('Estimating baselines around center of master", "at bottom of master image') masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0), self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0),", "the License. # # United States Government Sponsorship acknowledged. This software is subject", "or in support of a prohibited end use). By downloading this software, #", "%s\\n\" retlst += (self.pBaselineTop,) retstr += \"Bulk Azimuth Offset: %s\\n\" retlst += (self.orbSlcAzimuthOffset,)", "crossTrackBaselinePolynomialCoefficients[1] self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop = verticalBaselinePolynomialCoefficients[0] self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2]", "which to compute baselines - \"all\" implies '+ 'top, middle, bottom of master", "None self.rangePixelSize = None self.azimuthPixelSize = None self.height = None self.radius = None", "# Calculate the look vector of the master frame def calculateLookVector(self): try: z", "import datetime import logging from iscesys.Component.Component import Component, Port from isceobj.Util.mathModule import MathModule", "[cosl,sinl] # Calculate the scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite')", "image') masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() ==", "= frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height = midSV.calculateHeight(ellipsoid) def addSlaveFrame(self): frame = self._inputPorts.getPort(name='slaveFrame').getObject()", "vb = [] hb = [] csb = [] asb = [] s", "Required except when exporting to an embargoed country, # end user, or in", "midSV.calculateHeight(ellipsoid) def addSlaveFrame(self): frame = self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2 = frame.getStartingRange() self.slaveOrbit", "master image, '+ '\"top\" implies near start of master image, '+ '\"bottom\" implies", "= self._inputPorts.getPort(name='masterFrame').getObject() self.masterFrame = frame self.startingRange1 = frame.getStartingRange() prf = frame.getInstrument().getPulseRepetitionFrequency() self.rangePixelSize =", "time and a method that calls this method #TODO multiple times to calculate", "10**6) / 10**6 def addMasterFrame(self): frame = self._inputPorts.getPort(name='masterFrame').getObject() self.masterFrame = frame self.startingRange1 =", "the Look Angle of the master frame def calculateLookAngle(self): lookVector = self.calculateLookVector() return", "# U.S. 
export control laws and regulations and has been classified as 'EAR99", "'\"middle\" implies near middle of master image. '+ 'To be used in case", "2010 California Institute of Technology. ALL RIGHTS RESERVED. # # Licensed under the", "perpendicular to the platform position and velocity, this is the c, or cross-track", "try: ellipsoid = frame._ellipsoid #UAVSAR frame creates ellipsoid with peg self.radius = ellipsoid.pegRadCur", "def getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c): self.x3 = c def getCrossTrackVector(self): return self.x3", "the baseline between #TODO frames when given a master time and a slave", "# Calculate the Baseline at the start of the scene, mid-scene, and the", "cross-track and velocity directions # based on the spacecraft position def calculateBasis(self,orbit,time): sv", "self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop = csb[0] self.pBaselineBottom = csb[-1] self.orbSlcAzimuthOffset = az_offset self.orbSlcRangeOffset", "export # authority as may be required before exporting this software to any", "return self.pBaselineBottom def getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset def getRangeOffset(self): return", "frame.platformHeight except: ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height = midSV.calculateHeight(ellipsoid) def addSlaveFrame(self):", "and velocity at the start of the scene self.logger.info(\"Sampling time %s\" % i)", "[Export] License Required except when exporting to an embargoed country, # end user,", "print('Height: ', self.height) # print('Radius: ', self.radius) # print('Range: ', self.startingRange1) # print('COSL:", "= MM.normalizeVector(v) # Turn the velocity vector into a unit vector c =", "not use this file except in compliance with the License. # You may", "self.dictionaryOfOutputVariables = {} self.dictionaryOfVariables = {} self.descriptionOfVariables = {} self.mandatoryVariables = [] self.optionalVariables", "California Institute of Technology. ALL RIGHTS RESERVED. # # Licensed under the Apache", "= 'BASELINE_LOCATION', default = 'all', type=str, mandatory=False, doc = ('Location at which to", "for j in range(len(x1))] # Calculate the difference between the master and slave", "self.rangePixelSize = None self.azimuthPixelSize = None self.height = None self.radius = None self.startingRange1", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "Baseline: %s\\n\" retlst = (self.hBaselineTop,) retstr += \"Vertical Baseline: %s\\n\" retlst += (self.vBaselineTop,)", "Offset: %s\\n\" retlst += (self.orbSlcAzimuthOffset,) retstr += \"Bulk Range Offset: %s\\n\" retlst +=", "MInv[i][j]*Y[j] return A def setRangePixelSize(self,pixelSize): self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize =", "Baseline(Component): family = 'baseline' logging_name = 'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate the", "under the License. # # United States Government Sponsorship acknowledged. 
This software is", "self.x2 = v def getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c): self.x3 = c def", "= self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite') slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite') x1 = masterSV.getPosition() x2 = slaveSV.getPosition()", "to obtain export licenses, or other export # authority as may be required", "for i in range(size) ] for j in range(size)] for j in range(size):", "unit vector v = MM.normalizeVector(v) # Turn the velocity vector into a unit", "0): deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i] = s[i-1] + deltaT*normV masterSV =", "agreed to in writing, software # distributed under the License is distributed on", "normV = MM.norm(v) return normV # Given an orbit and a time, calculate", "regulations. # The user has the responsibility to obtain export licenses, or other", "self.orbSlcRangeOffset def getRangeOffset(self): return self.rangeOffset def getPhaseConst(self): return self.phaseConst def getLookAngle(self): return self.lookAngle", "is perpendicular to the cross-track direction and position basis = BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v)", "h_rate = crossTrackBaselinePolynomialCoefficients[1] # Calculate the gross azimuth and range offsets azb_avg =", "\"Perpendicular Baseline: %s\\n\" retlst += (self.pBaselineTop,) retstr += \"Bulk Azimuth Offset: %s\\n\" retlst", "', cosl) sinl = math.sqrt(1 - cosl*cosl) return [cosl,sinl] # Calculate the scalar", "start time relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV)) slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') # Recalculate", "polynomialFit(self,xRef,yRef): size = len(xRef) if not (len(xRef) == len(yRef)): print(\"Error. Expecting input vectors", "setPositionVector(self,x): self.x1 = x def getPositionVector(self): return self.x1 def setVelocityVector(self,v): self.x2 = v", "getPBaselineBottom(self): return self.pBaselineBottom def getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset def getRangeOffset(self):", "self.slaveFrame = frame self.startingRange2 = frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self, name=''): self.masterOrbit", "= csb[0] self.pBaselineBottom = csb[-1] self.orbSlcAzimuthOffset = az_offset self.orbSlcRangeOffset = r_offset self.rangeOffset =", "a slave time and a method that calls this method #TODO multiple times", "verticalBaselinePolynomialCoefficients[0] self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop = csb[0] self.pBaselineBottom = csb[-1]", "raise Exception Y = [0]*size A = [0]*size M = [[0 for i", "'+ '\"middle\" implies near middle of master image. 
'+ 'To be used in", "frame = self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2 = frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def", "Basis(object): def __init__(self): self.x1 = [] self.x2 = [] self.x3 = [] #", "x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) az_offset.append(z_offset) # Save the position offset #", "z = self.masterFrame.terrainHeight except: z = 0.0 cosl = ((self.height-z)*(2*self.radius + self.height +", "[self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()] for i in range(3): # Calculate the Baseline at the start of", "# Calculate a the \"velocity\" component that is perpendicular to the cross-track direction", "ellipsoid.get_a() self.height = midSV.calculateHeight(ellipsoid) def addSlaveFrame(self): frame = self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2", "j in range(size): for i in range(size): M[j][i] = math.pow(xRef[j],i) Y[j] = yRef[j]", "to in writing, software # distributed under the License is distributed on an", "First, get the position and velocity at the start of the scene self.logger.info(\"Sampling", "implied. # See the License for the specific language governing permissions and #", "of the scene # First, get the position and velocity at the start", "datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() == 'top': print('Estimating baselines at top of master image') masterTime", "= self.startingRange1 - self.startingRange2 # Calculate a quadratic fit to the baseline polynomial", "frame.getOrbit() midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize = midSV.getScalarVelocity()/prf try: ellipsoid = frame._ellipsoid #UAVSAR", "self._inputPorts.add(masterFramePort) self._inputPorts.add(slaveFramePort) return None def __str__(self): retstr = \"Initial Baseline estimates \\n\" retstr", "setCrossTrackVector(self,c): self.x3 = c def getCrossTrackVector(self): return self.x3 BASELINE_LOCATION = Component.Parameter('baselineLocation', public_name =", "getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset def getRangeOffset(self): return self.rangeOffset def getPhaseConst(self):", "Calculate the Baseline at the start of the scene, mid-scene, and the end", "calculateBasis(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') x1 = sv.getPosition() v = sv.getVelocity() r =", "time %s\" % i) masterBasis = self.calculateBasis(self.masterOrbit,masterTime[i]) normV = self.calculateScalarVelocity(self.masterOrbit,masterTime[i]) # Calculate the", "scene, mid-scene, and the end of the scene # First, get the position", "(self.hBaselineTop,) retstr += \"Vertical Baseline: %s\\n\" retlst += (self.vBaselineTop,) retstr += \"Perpendicular Baseline:", "class BaselineBasis(Basis): def __init__(self): Basis.__init__(self) def setPositionVector(self,x): self.x1 = x def getPositionVector(self): return", "Calculate the look vector of the master frame def calculateLookVector(self): try: z =", "frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self, name=''): self.masterOrbit = None self.slaveOrbit = None", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "countries. 
# # Author: <NAME> #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ import math import datetime import logging from", "master frame def calculateLookAngle(self): lookVector = self.calculateLookVector() return math.degrees(math.atan2(lookVector[1],lookVector[0])) # Calculate the look", "family = 'baseline' logging_name = 'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate the Look", "class Basis(object): def __init__(self): self.x1 = [] self.x2 = [] self.x3 = []", "az_offset.append(z_offset) # Save the position offset # Calculate a new start time relativeSlaveTime", "image') masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() ==", "offsets azb_avg = (az_offset[0] + az_offset[-1])/2.0 asb_avg = (asb[0] + asb[-1])/2.0 az_offset =", "specific language governing permissions and # limitations under the License. # # United", "self.pBaselineBottom = None self.orbSlcAzimuthOffset = None self.orbSlcRangeOffset = None self.rangeOffset = None self.phaseConst", "return self.orbSlcRangeOffset def getRangeOffset(self): return self.rangeOffset def getPhaseConst(self): return self.phaseConst def getLookAngle(self): return", "+= \"Vertical Baseline: %s\\n\" retlst += (self.vBaselineTop,) retstr += \"Perpendicular Baseline: %s\\n\" retlst", "* 10**6) / 10**6 def addMasterFrame(self): frame = self._inputPorts.getPort(name='masterFrame').getObject() self.masterFrame = frame self.startingRange1", "component that is perpendicular to the cross-track direction and position basis = BaselineBasis()", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0), self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStop()] else: raise Exception('Unknown baseline", "classified as 'EAR99 NLR' # (No [Export] License Required except when exporting to", "Set input ports # It looks like we really need two orbits, a", "scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') v = sv.getVelocity() normV", "into a method that calculates the baseline between #TODO frames when given a", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "export licenses, or other export # authority as may be required before exporting", "self.lookAngle = None self.rangePixelSize = None self.azimuthPixelSize = None self.height = None self.radius", "you may not use this file except in compliance with the License. 
#", "type=str, mandatory=False, doc = ('Location at which to compute baselines - \"all\" implies", "deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i] = s[i-1] + deltaT*normV masterSV = self.masterOrbit.interpolateOrbit(masterTime[i],", "def __init__(self, name=''): self.masterOrbit = None self.slaveOrbit = None self.masterFrame = None self.slaveFrame", "= None self.hBaselineTop = None self.hBaselineRate = None self.hBaselineAcc = None self.vBaselineTop =", "MM from isceobj.Orbit.Orbit import StateVector # A class to hold three-dimensional basis vectors", "print('COSL: ', cosl) sinl = math.sqrt(1 - cosl*cosl) return [cosl,sinl] # Calculate the", "location: {0}'.format(self.baselineLocation)) slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] # slaveTime", "= ellipsoid.pegRadCur self.height = frame.platformHeight except: ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height", "+ vb[i]*lookVector[1]) # Multiply the horizontal and vertical baseline components by the look", "= crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop = verticalBaselinePolynomialCoefficients[0] self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop =", "self.vBaselineRate def getVBaselineAcc(self): return self.vBaselineAcc def getPBaselineTop(self): return self.pBaselineTop def getPBaselineBottom(self): return self.pBaselineBottom", "deltaT*normV masterSV = self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite') slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite') x1 = masterSV.getPosition() x2", "getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c): self.x3 = c def getCrossTrackVector(self): return self.x3 BASELINE_LOCATION", "basis = BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v) basis.setCrossTrackVector(c) return basis # Given two position vectors", "'bottom': print('Estimating baselines at bottom of master image') masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0),", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. 
# # Licensed under", "#TODO This could be further refactored into a method that calculates the baseline", "def calculateBasisOffset(self,x1,x2,basis): dx = [(x2[j] - x1[j]) for j in range(len(x1))] # Calculate", "Calculate a quadratic fit to the baseline polynomial def polynomialFit(self,xRef,yRef): size = len(xRef)", "crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop = verticalBaselinePolynomialCoefficients[0] self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop = csb[0]", "the last baseline point if (i > 0): deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0])", "self.baselineLocation.lower() == 'top': print('Estimating baselines at top of master image') masterTime = [self.masterFrame.getSensingStart(),", "{} self.mandatoryVariables = [] self.optionalVariables = [] return None def createPorts(self): # Set", "def setRangePixelSize(self,pixelSize): self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize return def", "except: z = 0.0 cosl = ((self.height-z)*(2*self.radius + self.height + z) + self.startingRange1*self.startingRange1)/(", "of master image') masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif", "datetime.timedelta(seconds=1.0), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] # slaveTime = [self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()] for i in range(3):", "baseline change over time. for port in self.inputPorts: port() lookVector = self.calculateLookVector() az_offset", "and azimuth pixel sizes, starting ranges, # satellite heights and times for the", "= frame.getInstrument().getRangePixelSize() self.masterOrbit = frame.getOrbit() midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize = midSV.getScalarVelocity()/prf try:", "perpendicular to the cross-track direction and position basis = BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v) basis.setCrossTrackVector(c)", "(self.pBaselineTop,) retstr += \"Bulk Azimuth Offset: %s\\n\" retlst += (self.orbSlcAzimuthOffset,) retstr += \"Bulk", "Calculate the difference between the master and slave position vectors z_offset = MM.dotProduct(dx,basis.getVelocityVector())", "def getLookAngle(self): return self.lookAngle def _timeDeltaToSeconds(self,td): return (td.microseconds + (td.seconds + td.days *", "MM.crossProduct(r,v) # Calculate the vector perpendicular to the platform position and velocity, this", "Component.Parameter('baselineLocation', public_name = 'BASELINE_LOCATION', default = 'all', type=str, mandatory=False, doc = ('Location at", "def getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def getVBaselineRate(self): return self.vBaselineRate def", "#Calculating baseline crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb) verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) h_rate = crossTrackBaselinePolynomialCoefficients[1] # Calculate", "of master image, '+ '\"middle\" implies near middle of master image. 
'+ 'To", "3.\") raise Exception Y = [0]*size A = [0]*size M = [[0 for", "vectors and a basis, calculate the offset between the two positions in this", "return def setHeight(self,var): self.height = float(var) return def setRadius(self,radius): self.radius = radius return", "', self.height) # print('Radius: ', self.radius) # print('Range: ', self.startingRange1) # print('COSL: ',", "= (self.hBaselineTop,) retstr += \"Vertical Baseline: %s\\n\" retlst += (self.vBaselineTop,) retstr += \"Perpendicular", "+= \"Perpendicular Baseline: %s\\n\" retlst += (self.pBaselineTop,) retstr += \"Bulk Azimuth Offset: %s\\n\"", "master image') masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower()", "laws and regulations. # The user has the responsibility to obtain export licenses,", "pixelSize return def setHeight(self,var): self.height = float(var) return def setRadius(self,radius): self.radius = radius", "a basis, calculate the offset between the two positions in this basis def", "baseline point if (i > 0): deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i] =", "None self.vBaselineAcc = None self.pBaselineTop = None self.pBaselineBottom = None self.orbSlcAzimuthOffset = None", "datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom': print('Estimating baselines at bottom of", "self.height) # print('Radius: ', self.radius) # print('Range: ', self.startingRange1) # print('COSL: ', cosl)", "# Set input ports # It looks like we really need two orbits,", "isceobj.Util.mathModule import MathModule as MM from isceobj.Orbit.Orbit import StateVector # A class to", "math import datetime import logging from iscesys.Component.Component import Component, Port from isceobj.Util.mathModule import", "# satellite heights and times for the first lines masterFramePort = Port(name='masterFrame',method=self.addMasterFrame) slaveFramePort", "azimuth pixel sizes, starting ranges, # satellite heights and times for the first", "used in case there is a large shift between images.') ) class Baseline(Component):", "class Baseline(Component): family = 'baseline' logging_name = 'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate", "The user has the responsibility to obtain export licenses, or other export #", "self.calculateLookVector() return math.degrees(math.atan2(lookVector[1],lookVector[0])) # Calculate the look vector of the master frame def", "'\"top\" implies near start of master image, '+ '\"bottom\" implies at bottom of", "'+ '\"top\" implies near start of master image, '+ '\"bottom\" implies at bottom", "MM.dotProduct(dx,basis.getPositionVector()) c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline components between two", "# the two starting ranges, a planet, and the two prfs # These", "'top, middle, bottom of master image, '+ '\"top\" implies near start of master", "{} self.dictionaryOfVariables = {} self.descriptionOfVariables = {} self.mandatoryVariables = [] self.optionalVariables = []", "provide the orbits # These provide the range and azimuth pixel sizes, starting", "retstr = \"Initial Baseline estimates \\n\" retstr += \"Cross-track Baseline: %s\\n\" retlst =", "the two positions in this basis def calculateBasisOffset(self,x1,x2,basis): dx = [(x2[j] - 
x1[j])", "'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate the Look Angle of the master frame", "the baseline polynomial def polynomialFit(self,xRef,yRef): size = len(xRef) if not (len(xRef) == len(yRef)):", "= self.calculateBasis(self.masterOrbit,masterTime[i]) normV = self.calculateScalarVelocity(self.masterOrbit,masterTime[i]) # Calculate the distance moved since the last", "time, range and azimuth pixel sizes # the two starting ranges, a planet,", "print('Radius: ', self.radius) # print('Range: ', self.startingRange1) # print('COSL: ', cosl) sinl =", "normV # Given an orbit and a time, calculate an orthogonal basis for", "= [] self.x2 = [] self.x3 = [] # A class to hold", "vb[i]*lookVector[0]) #Calculating baseline crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s,hb) verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) h_rate = crossTrackBaselinePolynomialCoefficients[1] #", "= None self.vBaselineRate = None self.vBaselineAcc = None self.pBaselineTop = None self.pBaselineBottom =", "the platform position and velocity, this is the c, or cross-track vector c", "# Satisfy the old Component self.dictionaryOfOutputVariables = {} self.dictionaryOfVariables = {} self.descriptionOfVariables =", "the scene, mid-scene, and the end of the scene # First, get the", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "user, or in support of a prohibited end use). By downloading this software,", "self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2 = frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self, name=''):", "the length of the projection of the difference in position and the \"velocity\"", "getRangeOffset(self): return self.rangeOffset def getPhaseConst(self): return self.phaseConst def getLookAngle(self): return self.lookAngle def _timeDeltaToSeconds(self,td):", "and azimuth pixel sizes # the two starting ranges, a planet, and the", "self.masterFrame = frame self.startingRange1 = frame.getStartingRange() prf = frame.getInstrument().getPulseRepetitionFrequency() self.rangePixelSize = frame.getInstrument().getRangePixelSize() self.masterOrbit", "# print('Range: ', self.startingRange1) # print('COSL: ', cosl) sinl = math.sqrt(1 - cosl*cosl)", "Given an orbit and a time, calculate an orthogonal basis for cross-track and", "A = [0]*size M = [[0 for i in range(size) ] for j", "logging_name = 'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate the Look Angle of the", "basis for cross-track and velocity directions # based on the spacecraft position def", "position and velocity at the start of the scene self.logger.info(\"Sampling time %s\" %", "and velocity directions # based on the spacecraft position def calculateBasis(self,orbit,time): sv =", "except: ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height = midSV.calculateHeight(ellipsoid) def addSlaveFrame(self): frame", "in position and the \"velocity\" component v_offset = MM.dotProduct(dx,basis.getPositionVector()) c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return", "creates ellipsoid with peg self.radius = ellipsoid.pegRadCur self.height = frame.platformHeight except: ellipsoid =", "See the License for the specific language governing permissions and # limitations under", "self.startingRange1 = frame.getStartingRange() prf = frame.getInstrument().getPulseRepetitionFrequency() self.rangePixelSize = 
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# United States Government Sponsorship acknowledged. This software is subject to
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
# (No [Export] License Required except when exporting to an embargoed country,
# end user, or in support of a prohibited end use). By downloading this software,
# the user agrees to comply with all applicable U.S. export laws and regulations.
# The user has the responsibility to obtain export licenses, or other export
# authority as may be required before exporting this software to any 'EAR99'
# embargoed foreign country or citizen of those countries.
#
# Author: <NAME>
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~


import math
import datetime
import logging
from iscesys.Component.Component import Component, Port
from isceobj.Util.mathModule import MathModule as MM
from isceobj.Orbit.Orbit import StateVector
# A class to hold three-dimensional basis vectors
class Basis(object):
    def __init__(self):
        self.x1 = []
        self.x2 = []
        self.x3 = []


# A class to hold three-dimensional basis vectors for spacecraft baselines
class BaselineBasis(Basis):
    def __init__(self):
        Basis.__init__(self)

    def setPositionVector(self, x):
        self.x1 = x

    def getPositionVector(self):
        return self.x1

    def setVelocityVector(self, v):
        self.x2 = v

    def getVelocityVector(self):
        return self.x2

    def setCrossTrackVector(self, c):
        self.x3 = c

    def getCrossTrackVector(self):
        return self.x3
BASELINE_LOCATION = Component.Parameter('baselineLocation',
        public_name='BASELINE_LOCATION',
        default='all',
        type=str,
        mandatory=False,
        doc=('Location at which to compute baselines - "all" implies ' +
             'top, middle, bottom of master image, ' +
             '"top" implies near start of master image, ' +
             '"bottom" implies at bottom of master image, ' +
             '"middle" implies near middle of master image. ' +
             'To be used in case there is a large shift between images.'))
class Baseline(Component):

    family = 'baseline'
    logging_name = 'isce.mroipac.baseline'
    parameter_list = (BASELINE_LOCATION,)

    # Calculate the Look Angle of the master frame
    def calculateLookAngle(self):
        lookVector = self.calculateLookVector()
        return math.degrees(math.atan2(lookVector[1], lookVector[0]))

    # Calculate the look vector of the master frame
    def calculateLookVector(self):
        try:
            z = self.masterFrame.terrainHeight
        except:
            z = 0.0
        cosl = ((self.height - z)*(2*self.radius + self.height + z) +
                self.startingRange1*self.startingRange1)/(
                2*self.startingRange1*(self.radius + self.height))
        # print('Height: ', self.height)
        # print('Radius: ', self.radius)
        # print('Range: ', self.startingRange1)
        # print('COSL: ', cosl)
        sinl = math.sqrt(1 - cosl*cosl)
        return [cosl, sinl]
    # Calculate the scalar spacecraft velocity
    def calculateScalarVelocity(self, orbit, time):
        sv = orbit.interpolateOrbit(time, method='hermite')
        v = sv.getVelocity()
        normV = MM.norm(v)
        return normV

    # Given an orbit and a time, calculate an orthogonal basis for the
    # cross-track and velocity directions based on the spacecraft position
    def calculateBasis(self, orbit, time):
        sv = orbit.interpolateOrbit(time, method='hermite')
        x1 = sv.getPosition()
        v = sv.getVelocity()
        r = MM.normalizeVector(x1)  # Turn the position vector into a unit vector
        v = MM.normalizeVector(v)   # Turn the velocity vector into a unit vector
        # Calculate the vector perpendicular to the platform position and
        # velocity; this is the c, or cross-track, vector
        c = MM.crossProduct(r, v)
        c = MM.normalizeVector(c)
        # Calculate the "velocity" component that is perpendicular to the
        # cross-track direction and position
        v = MM.crossProduct(c, r)
        basis = BaselineBasis()
        basis.setPositionVector(r)
        basis.setVelocityVector(v)
        basis.setCrossTrackVector(c)
        return basis

    # Given two position vectors and a basis, calculate the offset between the
    # two positions in this basis
    def calculateBasisOffset(self, x1, x2, basis):
        # Calculate the difference between the master and slave position vectors
        dx = [(x2[j] - x1[j]) for j in range(len(x1))]
        # Project that difference onto the "velocity", position, and cross-track axes
        z_offset = MM.dotProduct(dx, basis.getVelocityVector())
        v_offset = MM.dotProduct(dx, basis.getPositionVector())
        c_offset = MM.dotProduct(dx, basis.getCrossTrackVector())
        return z_offset, v_offset, c_offset
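    # How the two helpers above fit together: calculateBasis() builds a
    # right-handed orthonormal triad from the interpolated state vector -- r is
    # the unit position vector, c = normalize(r x v) points cross-track, and v
    # is recomputed as c x r so it is orthogonal to both.  calculateBasisOffset()
    # then expresses the master-to-slave separation in that triad: the
    # projection onto v behaves as an along-track (timing) offset, the
    # projection onto r as a vertical baseline component, and the projection
    # onto c as a horizontal (cross-track) baseline component.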
    # Calculate the baseline components between two frames
    def baseline(self):
        #TODO This could be further refactored into a method that calculates the
        #TODO baseline between frames when given a master time and a slave time,
        #TODO and a method that calls this method multiple times to calculate the
        #TODO rate of baseline change over time.
        for port in self.inputPorts:
            port()

        lookVector = self.calculateLookVector()
        az_offset = []
        vb = []
        hb = []
        csb = []
        asb = []
        s = [0., 0., 0.]

        if self.baselineLocation.lower() == 'all':
            print('Using entire span of image for estimating baselines')
            masterTime = [self.masterFrame.getSensingStart(),
                          self.masterFrame.getSensingMid(),
                          self.masterFrame.getSensingStop()]
        elif self.baselineLocation.lower() == 'middle':
            print('Estimating baselines around center of master image')
            masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingMid(),
                          self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]
        elif self.baselineLocation.lower() == 'top':
            print('Estimating baselines at top of master image')
            masterTime = [self.masterFrame.getSensingStart(),
                          self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)]
        elif self.baselineLocation.lower() == 'bottom':
            print('Estimating baselines at bottom of master image')
            masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0),
                          self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingStop()]
        else:
            raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation))

        slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0),
                     self.slaveFrame.getSensingMid(),
                     self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]
        # slaveTime = [self.slaveFrame.getSensingStart(), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingStop()]

        # Calculate the baseline at the start of the scene, mid-scene, and the
        # end of the scene
        for i in range(3):
            # First, get the position and velocity at this sampling time
            self.logger.info("Sampling time %s" % i)
            masterBasis = self.calculateBasis(self.masterOrbit, masterTime[i])
            normV = self.calculateScalarVelocity(self.masterOrbit, masterTime[i])

            # Calculate the distance moved since the last baseline point
            if (i > 0):
                deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0])
                s[i] = s[i-1] + deltaT*normV

            masterSV = self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite')
            slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite')
            x1 = masterSV.getPosition()
            x2 = slaveSV.getPosition()
            (z_offset, v_offset, c_offset) = self.calculateBasisOffset(x1, x2, masterBasis)
            az_offset.append(z_offset)  # Save the position offset

            # Calculate a new start time
            relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV))
            slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite')

            # Recalculate the offsets
            x2 = slaveSV.getPosition()
            (z_offset, v_offset, c_offset) = self.calculateBasisOffset(x1, x2, masterBasis)

            vb.append(v_offset)
            hb.append(c_offset)
            # Multiply the horizontal and vertical baseline components by the
            # look angle vector
            csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1])
            asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0])

        # Calculating baseline
        crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s, hb)
        verticalBaselinePolynomialCoefficients = self.polynomialFit(s, vb)
        h_rate = crossTrackBaselinePolynomialCoefficients[1]

        # Calculate the gross azimuth and range offsets
        azb_avg = (az_offset[0] + az_offset[-1])/2.0
        asb_avg = (asb[0] + asb[-1])/2.0
        az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize)
        r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize)

        # Populate class attributes
        self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0]
        self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1]
        self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2]
        self.vBaselineTop = verticalBaselinePolynomialCoefficients[0]
        self.vBaselineRate = verticalBaselinePolynomialCoefficients[1]
        self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2]
        self.pBaselineTop = csb[0]
        self.pBaselineBottom = csb[-1]
        self.orbSlcAzimuthOffset = az_offset
        self.orbSlcRangeOffset = r_offset
        self.rangeOffset = self.startingRange1 - self.startingRange2
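    # A short reading guide for baseline(), derived from the code above: with
    # lookVector = [cos(look), sin(look)], csb = -h*cos(look) + v*sin(look) is
    # the baseline component perpendicular to the look direction and is what
    # gets reported as pBaselineTop / pBaselineBottom, while asb is the
    # component parallel to the look direction and, together with the
    # difference in starting ranges, sets the bulk range offset.  Both bulk
    # offsets are converted from metres to pixels by dividing by
    # azimuthPixelSize and rangePixelSize respectively.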
    # Calculate a quadratic fit to the baseline polynomial
    def polynomialFit(self, xRef, yRef):
        size = len(xRef)
        if not (len(xRef) == len(yRef)):
            print("Error. Expecting input vectors of same length.")
            raise Exception
        if not (size == 3):
            print("Error. Expecting input vectors of length 3.")
            raise Exception

        Y = [0]*size
        A = [0]*size
        M = [[0 for i in range(size)] for j in range(size)]
        for j in range(size):
            for i in range(size):
                M[j][i] = math.pow(xRef[j], i)
            Y[j] = yRef[j]

        MInv = MM.invertMatrix(M)
        for i in range(size):
            for j in range(size):
                A[i] += MInv[i][j]*Y[j]

        return A
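    # The fit above solves the 3x3 Vandermonde system M A = Y exactly, so the
    # returned coefficients satisfy y(x) = A[0] + A[1]*x + A[2]*x**2 at the
    # three sample points.  If numpy happened to be available, an equivalent
    # cross-check (an assumption, not part of this module) would be:
    #     import numpy as np
    #     A = list(np.polyfit(xRef, yRef, 2)[::-1])  # reversed to lowest-order first
    # The hand-rolled solve keeps the module free of that dependency.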
    def setRangePixelSize(self, pixelSize):
        self.rangePixelSize = pixelSize
        return

    def setAzimuthPixelSize(self, pixelSize):
        self.azimuthPixelSize = pixelSize
        return

    def setHeight(self, var):
        self.height = float(var)
        return

    def setRadius(self, radius):
        self.radius = radius
        return

    def setMasterStartingRange(self, range):
        self.startingRange1 = range
        return

    def setSlaveStartingRange(self, range):
        self.startingRange2 = range
        return

    def getHBaselineTop(self):
        return self.hBaselineTop

    def getHBaselineRate(self):
        return self.hBaselineRate

    def getHBaselineAcc(self):
        return self.hBaselineAcc

    def getVBaselineTop(self):
        return self.vBaselineTop

    def getVBaselineRate(self):
        return self.vBaselineRate

    def getVBaselineAcc(self):
        return self.vBaselineAcc

    def getPBaselineTop(self):
        return self.pBaselineTop

    def getPBaselineBottom(self):
        return self.pBaselineBottom

    def getOrbSlcAzimuthOffset(self):
        return self.orbSlcAzimuthOffset

    def getOrbSlcRangeOffset(self):
        return self.orbSlcRangeOffset

    def getRangeOffset(self):
        return self.rangeOffset

    def getPhaseConst(self):
        return self.phaseConst

    def getLookAngle(self):
        return self.lookAngle

    def _timeDeltaToSeconds(self, td):
        return (td.microseconds + (td.seconds + td.days * 24.0 * 3600) * 10**6) / 10**6
    def addMasterFrame(self):
        frame = self._inputPorts.getPort(name='masterFrame').getObject()
        self.masterFrame = frame
        self.startingRange1 = frame.getStartingRange()
        prf = frame.getInstrument().getPulseRepetitionFrequency()
        self.rangePixelSize = frame.getInstrument().getRangePixelSize()
        self.masterOrbit = frame.getOrbit()
        midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite')
        self.azimuthPixelSize = midSV.getScalarVelocity()/prf
        try:
            ellipsoid = frame._ellipsoid  # UAVSAR frame creates ellipsoid with peg
            self.radius = ellipsoid.pegRadCur
            self.height = frame.platformHeight
        except:
            ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp()
            self.radius = ellipsoid.get_a()
            self.height = midSV.calculateHeight(ellipsoid)

    def addSlaveFrame(self):
        frame = self._inputPorts.getPort(name='slaveFrame').getObject()
        self.slaveFrame = frame
        self.startingRange2 = frame.getStartingRange()
        self.slaveOrbit = frame.getOrbit()
Expecting input vectors of length", "'+ 'top, middle, bottom of master image, '+ '\"top\" implies near start of", "of image for estimating baselines') masterTime = [self.masterFrame.getSensingStart(),self.masterFrame.getSensingMid(),self.masterFrame.getSensingStop()] elif self.baselineLocation.lower() == 'middle': print('Estimating", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "\"all\" implies '+ 'top, middle, bottom of master image, '+ '\"top\" implies near", "masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() == 'top':", "shift between images.') ) class Baseline(Component): family = 'baseline' logging_name = 'isce.mroipac.baseline' parameter_list", "applicable U.S. export laws and regulations. # The user has the responsibility to", "License. # You may obtain a copy of the License at # #", "yRef[j] MInv = MM.invertMatrix(M) for i in range(size): for j in range(size): A[i]", "= [0]*size A = [0]*size M = [[0 for i in range(size) ]", "of master image, '+ '\"bottom\" implies at bottom of master image, '+ '\"middle\"", "return self.x1 def setVelocityVector(self,v): self.x2 = v def getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c):", "= None self.radius = None self.startingRange1 = None self.startingRange2 = None self.hBaselineTop =", "self.vBaselineAcc def getPBaselineTop(self): return self.pBaselineTop def getPBaselineBottom(self): return self.pBaselineBottom def getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset", "= frame.getOrbit() def __init__(self, name=''): self.masterOrbit = None self.slaveOrbit = None self.masterFrame =", "getHBaselineRate(self): return self.hBaselineRate def getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def getVBaselineRate(self):", "def _timeDeltaToSeconds(self,td): return (td.microseconds + (td.seconds + td.days * 24.0 * 3600) *", "Expecting input vectors of length 3.\") raise Exception Y = [0]*size A =", "= ((self.height-z)*(2*self.radius + self.height + z) + self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius + self.height) ) #", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "= (az_offset[0] + az_offset[-1])/2.0 asb_avg = (asb[0] + asb[-1])/2.0 az_offset = (-azb_avg -", "a large shift between images.') ) class Baseline(Component): family = 'baseline' logging_name =", "csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply the horizontal and vertical baseline components by the", "and the end of the scene # First, get the position and velocity", "[0.,0.,0.] 
if self.baselineLocation.lower() == 'all': print('Using entire span of image for estimating baselines')", "= self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') # Recalculate the offsets x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis)", "sv = orbit.interpolateOrbit(time, method='hermite') v = sv.getVelocity() normV = MM.norm(v) return normV #", "master image, '+ '\"bottom\" implies at bottom of master image, '+ '\"middle\" implies", "the first lines masterFramePort = Port(name='masterFrame',method=self.addMasterFrame) slaveFramePort = Port(name='slaveFrame',method=self.addSlaveFrame) self._inputPorts.add(masterFramePort) self._inputPorts.add(slaveFramePort) return None", "c, or cross-track vector c = MM.normalizeVector(c) v = MM.crossProduct(c,r) # Calculate a", "= BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v) basis.setCrossTrackVector(c) return basis # Given two position vectors and", "License, Version 2.0 (the \"License\"); # you may not use this file except", "from isceobj.Orbit.Orbit import StateVector # A class to hold three-dimensional basis vectors class", "estimating baselines') masterTime = [self.masterFrame.getSensingStart(),self.masterFrame.getSensingMid(),self.masterFrame.getSensingStop()] elif self.baselineLocation.lower() == 'middle': print('Estimating baselines around center", "calls this method #TODO multiple times to calculate the rate of baseline change", "except when exporting to an embargoed country, # end user, or in support", "hold three-dimensional basis vectors for spacecraft baselines class BaselineBasis(Basis): def __init__(self): Basis.__init__(self) def", "position vector into a unit vector v = MM.normalizeVector(v) # Turn the velocity", "3): print(\"Error. 
Expecting input vectors of length 3.\") raise Exception Y = [0]*size", "datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom': print('Estimating baselines at bottom of master image') masterTime", "three-dimensional basis vectors class Basis(object): def __init__(self): self.x1 = [] self.x2 = []", "self.height + z) + self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius + self.height) ) # print('Height: ', self.height)", "elif self.baselineLocation.lower() == 'middle': print('Estimating baselines around center of master image') masterTime =", "sizes # the two starting ranges, a planet, and the two prfs #", "the two starting ranges, a planet, and the two prfs # These provide", "self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom': print('Estimating baselines at", "+ datetime.timedelta(seconds=1.0)] # slaveTime = [self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()] for i in range(3): # Calculate the", "h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize) # Populate class attributes self.hBaselineTop", "self.pBaselineTop = None self.pBaselineBottom = None self.orbSlcAzimuthOffset = None self.orbSlcRangeOffset = None self.rangeOffset", "= math.sqrt(1 - cosl*cosl) return [cosl,sinl] # Calculate the scalar spacecraft velocity def", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "at which to compute baselines - \"all\" implies '+ 'top, middle, bottom of", "MInv = MM.invertMatrix(M) for i in range(size): for j in range(size): A[i] +=", "range offsets azb_avg = (az_offset[0] + az_offset[-1])/2.0 asb_avg = (asb[0] + asb[-1])/2.0 az_offset", "the c, or cross-track vector c = MM.normalizeVector(c) v = MM.crossProduct(c,r) # Calculate", "self.rangeOffset = None self.phaseConst = -99999 super(Baseline, self).__init__(family=self.__class__.family, name=name) self.logger = logging.getLogger('isce.mroipac.baseline') self.createPorts()", "self.inputPorts: port() lookVector = self.calculateLookVector() az_offset = [] vb = [] hb =", "self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop = verticalBaselinePolynomialCoefficients[0] self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop", "last baseline point if (i > 0): deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i]", "between the master and slave position vectors z_offset = MM.dotProduct(dx,basis.getVelocityVector()) # Calculate the", "+ datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom': print('Estimating baselines at bottom of master image')", "self.x3 BASELINE_LOCATION = Component.Parameter('baselineLocation', public_name = 'BASELINE_LOCATION', default = 'all', type=str, mandatory=False, doc", "frame.getOrbit() def __init__(self, name=''): self.masterOrbit = None self.slaveOrbit = None self.masterFrame = None", "and a slave time and a method that calls this method #TODO multiple", "__init__(self): self.x1 = [] self.x2 = [] self.x3 = [] # A class", "getHBaselineTop(self): return self.hBaselineTop def getHBaselineRate(self): return self.hBaselineRate def getHBaselineAcc(self): return self.hBaselineAcc def 
getVBaselineTop(self):", "pixel sizes # the two starting ranges, a planet, and the two prfs", "* 3600) * 10**6) / 10**6 def addMasterFrame(self): frame = self._inputPorts.getPort(name='masterFrame').getObject() self.masterFrame =", "None self.azimuthPixelSize = None self.height = None self.radius = None self.startingRange1 = None", "near start of master image, '+ '\"bottom\" implies at bottom of master image,", "implies near middle of master image. '+ 'To be used in case there", "method='hermite') x1 = masterSV.getPosition() x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) az_offset.append(z_offset) # Save", "None self.orbSlcAzimuthOffset = None self.orbSlcRangeOffset = None self.rangeOffset = None self.phaseConst = -99999", "import math import datetime import logging from iscesys.Component.Component import Component, Port from isceobj.Util.mathModule", "return def setMasterStartingRange(self,range): self.startingRange1 = range return def setSlaveStartingRange(self,range): self.startingRange2 = range return", "self.calculateBasis(self.masterOrbit,masterTime[i]) normV = self.calculateScalarVelocity(self.masterOrbit,masterTime[i]) # Calculate the distance moved since the last baseline", "logging from iscesys.Component.Component import Component, Port from isceobj.Util.mathModule import MathModule as MM from", "self.baselineLocation.lower() == 'middle': print('Estimating baselines around center of master image') masterTime = [self.masterFrame.getSensingMid()", "'top': print('Estimating baselines at top of master image') masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() +", "self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2] self.pBaselineTop = csb[0] self.pBaselineBottom = csb[-1] self.orbSlcAzimuthOffset", "[0]*size A = [0]*size M = [[0 for i in range(size) ] for", "# Calculate the gross azimuth and range offsets azb_avg = (az_offset[0] + az_offset[-1])/2.0", "baseline location: {0}'.format(self.baselineLocation)) slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] #", "to any 'EAR99' # embargoed foreign country or citizen of those countries. #", "or implied. 
# See the License for the specific language governing permissions and", "and regulations and has been classified as 'EAR99 NLR' # (No [Export] License", "baselines - \"all\" implies '+ 'top, middle, bottom of master image, '+ '\"top\"", "= None self.pBaselineTop = None self.pBaselineBottom = None self.orbSlcAzimuthOffset = None self.orbSlcRangeOffset =", "first lines masterFramePort = Port(name='masterFrame',method=self.addMasterFrame) slaveFramePort = Port(name='slaveFrame',method=self.addSlaveFrame) self._inputPorts.add(masterFramePort) self._inputPorts.add(slaveFramePort) return None def", "= 0.0 cosl = ((self.height-z)*(2*self.radius + self.height + z) + self.startingRange1*self.startingRange1)/( 2*self.startingRange1*(self.radius +", "float(var) return def setRadius(self,radius): self.radius = radius return def setMasterStartingRange(self,range): self.startingRange1 = range", "# A class to hold three-dimensional basis vectors for spacecraft baselines class BaselineBasis(Basis):", "and a method that calls this method #TODO multiple times to calculate the", "= None self.vBaselineAcc = None self.pBaselineTop = None self.pBaselineBottom = None self.orbSlcAzimuthOffset =", "lines masterFramePort = Port(name='masterFrame',method=self.addMasterFrame) slaveFramePort = Port(name='slaveFrame',method=self.addSlaveFrame) self._inputPorts.add(masterFramePort) self._inputPorts.add(slaveFramePort) return None def __str__(self):", "[0]*size M = [[0 for i in range(size) ] for j in range(size)]", "cross-track vector c = MM.normalizeVector(c) v = MM.crossProduct(c,r) # Calculate a the \"velocity\"", "import MathModule as MM from isceobj.Orbit.Orbit import StateVector # A class to hold", "[] s = [0.,0.,0.] if self.baselineLocation.lower() == 'all': print('Using entire span of image", "= self.calculateBasisOffset(x1,x2,masterBasis) az_offset.append(z_offset) # Save the position offset # Calculate a new start", "retstr += \"Bulk Range Offset: %s\\n\" retlst += (self.orbSlcRangeOffset,) return retstr % retlst", "datetime.timedelta(seconds=(z_offset/normV)) slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') # Recalculate the offsets x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset)", "+ asb[-1])/2.0 az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) r_offset = (self.startingRange1 - self.startingRange2 -", "(self.orbSlcAzimuthOffset,) retstr += \"Bulk Range Offset: %s\\n\" retlst += (self.orbSlcRangeOffset,) return retstr %", "vector perpendicular to the platform position and velocity, this is the c, or", "master time and a slave time and a method that calls this method", "= None self.masterFrame = None self.slaveFrame = None self.lookAngle = None self.rangePixelSize =", "for i in range(3): # Calculate the Baseline at the start of the", "support of a prohibited end use). 
By downloading this software, # the user", "vectors class Basis(object): def __init__(self): self.x1 = [] self.x2 = [] self.x3 =", "hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply the horizontal and vertical baseline components by", "It looks like we really need two orbits, a time, range and azimuth", "= [] # A class to hold three-dimensional basis vectors for spacecraft baselines", "Calculate the scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') v =", "of the master frame def calculateLookVector(self): try: z = self.masterFrame.terrainHeight except: z =", "self.startingRange1 - self.startingRange2 # Calculate a quadratic fit to the baseline polynomial def", "def getVBaselineRate(self): return self.vBaselineRate def getVBaselineAcc(self): return self.vBaselineAcc def getPBaselineTop(self): return self.pBaselineTop def", "for i in range(size): M[j][i] = math.pow(xRef[j],i) Y[j] = yRef[j] MInv = MM.invertMatrix(M)", "ranges, # satellite heights and times for the first lines masterFramePort = Port(name='masterFrame',method=self.addMasterFrame)", "use this file except in compliance with the License. # You may obtain", "import Component, Port from isceobj.Util.mathModule import MathModule as MM from isceobj.Orbit.Orbit import StateVector", "United States Government Sponsorship acknowledged. This software is subject to # U.S. export", "the difference between the master and slave position vectors z_offset = MM.dotProduct(dx,basis.getVelocityVector()) #", "multiple times to calculate the rate of baseline change over time. for port", "- datetime.timedelta(seconds=2.0), self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStop()] else: raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation)) slaveTime", "ellipsoid with peg self.radius = ellipsoid.pegRadCur self.height = frame.platformHeight except: ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp()", "U.S. export control laws and regulations and has been classified as 'EAR99 NLR'", "None self.radius = None self.startingRange1 = None self.startingRange2 = None self.hBaselineTop = None", "that is perpendicular to the cross-track direction and position basis = BaselineBasis() basis.setPositionVector(r)", "input vectors of same length.\") raise Exception if not (size == 3): print(\"Error.", "all applicable U.S. export laws and regulations. # The user has the responsibility", "exporting this software to any 'EAR99' # embargoed foreign country or citizen of", "getVBaselineRate(self): return self.vBaselineRate def getVBaselineAcc(self): return self.vBaselineAcc def getPBaselineTop(self): return self.pBaselineTop def getPBaselineBottom(self):", "getVBaselineAcc(self): return self.vBaselineAcc def getPBaselineTop(self): return self.pBaselineTop def getPBaselineBottom(self): return self.pBaselineBottom def getOrbSlcAzimuthOffset(self):", "around center of master image') masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() +", "downloading this software, # the user agrees to comply with all applicable U.S.", "for the specific language governing permissions and # limitations under the License. 
#", "+= \"Cross-track Baseline: %s\\n\" retlst = (self.hBaselineTop,) retstr += \"Vertical Baseline: %s\\n\" retlst", "slave time and a method that calls this method #TODO multiple times to", "setVelocityVector(self,v): self.x2 = v def getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c): self.x3 = c", "x1 = sv.getPosition() v = sv.getVelocity() r = MM.normalizeVector(x1) # Turn the position", "vb.append(v_offset) hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply the horizontal and vertical baseline components", "- h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize) # Populate class attributes", "not (size == 3): print(\"Error. Expecting input vectors of length 3.\") raise Exception", "the \"velocity\" component that is perpendicular to the cross-track direction and position basis", "[] asb = [] s = [0.,0.,0.] if self.baselineLocation.lower() == 'all': print('Using entire", "really need two orbits, a time, range and azimuth pixel sizes # the", "'middle': print('Estimating baselines around center of master image') masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0),", "lookVector = self.calculateLookVector() az_offset = [] vb = [] hb = [] csb", "[self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower() == 'top': print('Estimating baselines", "'baseline' logging_name = 'isce.mroipac.baseline' parameter_list = (BASELINE_LOCATION,) # Calculate the Look Angle of", "to hold three-dimensional basis vectors for spacecraft baselines class BaselineBasis(Basis): def __init__(self): Basis.__init__(self)", "= None self.lookAngle = None self.rangePixelSize = None self.azimuthPixelSize = None self.height =", "[self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)] elif self.baselineLocation.lower() == 'bottom': print('Estimating baselines", "setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize return def setHeight(self,var): self.height = float(var) return def setRadius(self,radius):", "pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize return def setHeight(self,var): self.height = float(var)", "z_offset = MM.dotProduct(dx,basis.getVelocityVector()) # Calculate the length of the projection of the difference", "velocity at the start of the scene self.logger.info(\"Sampling time %s\" % i) masterBasis", "= None self.hBaselineAcc = None self.vBaselineTop = None self.vBaselineRate = None self.vBaselineAcc =", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "return self.phaseConst def getLookAngle(self): return self.lookAngle def _timeDeltaToSeconds(self,td): return (td.microseconds + (td.seconds +", "retstr += \"Perpendicular Baseline: %s\\n\" retlst += (self.pBaselineTop,) retstr += \"Bulk Azimuth Offset:", "= [] vb = [] hb = [] csb = [] asb =", "None self.vBaselineTop = None self.vBaselineRate = None self.vBaselineAcc = None self.pBaselineTop = None", "vectors of same length.\") raise Exception if not (size == 3): print(\"Error. 
Expecting", "return self.hBaselineAcc def getVBaselineTop(self): return self.vBaselineTop def getVBaselineRate(self): return self.vBaselineRate def getVBaselineAcc(self): return", "# A class to hold three-dimensional basis vectors class Basis(object): def __init__(self): self.x1", "= MM.norm(v) return normV # Given an orbit and a time, calculate an", "ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height = midSV.calculateHeight(ellipsoid) def addSlaveFrame(self): frame =", "= None self.startingRange2 = None self.hBaselineTop = None self.hBaselineRate = None self.hBaselineAcc =", "] for j in range(size)] for j in range(size): for i in range(size):", "self.startingRange2 # Calculate a quadratic fit to the baseline polynomial def polynomialFit(self,xRef,yRef): size", "A def setRangePixelSize(self,pixelSize): self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize return", "slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') # Recalculate the offsets x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) =", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "Port from isceobj.Util.mathModule import MathModule as MM from isceobj.Orbit.Orbit import StateVector # A", "from isceobj.Util.mathModule import MathModule as MM from isceobj.Orbit.Orbit import StateVector # A class", "[] # A class to hold three-dimensional basis vectors for spacecraft baselines class", "of master image. '+ 'To be used in case there is a large", "embargoed country, # end user, or in support of a prohibited end use).", "def getPBaselineTop(self): return self.pBaselineTop def getPBaselineBottom(self): return self.pBaselineBottom def getOrbSlcAzimuthOffset(self): return self.orbSlcAzimuthOffset def", "master image') masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingMid(), self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] elif self.baselineLocation.lower()", "Given two position vectors and a basis, calculate the offset between the two", "method='hermite') x1 = sv.getPosition() v = sv.getVelocity() r = MM.normalizeVector(x1) # Turn the", "BASELINE_LOCATION = Component.Parameter('baselineLocation', public_name = 'BASELINE_LOCATION', default = 'all', type=str, mandatory=False, doc =", "scene # First, get the position and velocity at the start of the", "parameter_list = (BASELINE_LOCATION,) # Calculate the Look Angle of the master frame def", "return self.orbSlcAzimuthOffset def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset def getRangeOffset(self): return self.rangeOffset def getPhaseConst(self): return", "= None self.rangePixelSize = None self.azimuthPixelSize = None self.height = None self.radius =", "Baseline: %s\\n\" retlst += (self.vBaselineTop,) retstr += \"Perpendicular Baseline: %s\\n\" retlst += (self.pBaselineTop,)", "the look vector of the master frame def calculateLookVector(self): try: z = self.masterFrame.terrainHeight", "midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize = midSV.getScalarVelocity()/prf try: ellipsoid = frame._ellipsoid #UAVSAR frame", "orbit and a time, calculate an orthogonal basis for cross-track and velocity directions", "Azimuth Offset: %s\\n\" retlst += (self.orbSlcAzimuthOffset,) retstr += \"Bulk Range Offset: %s\\n\" retlst", "#TODO frames when given a master time and a slave time 
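
    # Taken together, calculateBasis() and calculateBasisOffset() express the
    # master-to-slave separation in a local orthonormal triad built from the
    # master state vector: a radial (position) unit vector, an along-track
    # ("velocity") unit vector, and a cross-track unit vector. z_offset is
    # therefore the along-track separation, while v_offset and c_offset are the
    # radial (vertical) and cross-track (horizontal) baseline components used
    # by baseline() below.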

    # Calculate the baseline components between two frames
    def baseline(self):
        #TODO This could be further refactored into a method that calculates the baseline between
        #TODO frames when given a master time and a slave time, and a method that calls this method
        #TODO multiple times to calculate the rate of baseline change over time.
        for port in self.inputPorts:
            port()

        lookVector = self.calculateLookVector()
        az_offset = []
        vb = []
        hb = []
        csb = []
        asb = []
        s = [0., 0., 0.]

        if self.baselineLocation.lower() == 'all':
            print('Using entire span of image for estimating baselines')
            masterTime = [self.masterFrame.getSensingStart(),
                          self.masterFrame.getSensingMid(),
                          self.masterFrame.getSensingStop()]
        elif self.baselineLocation.lower() == 'middle':
            print('Estimating baselines around center of master image')
            masterTime = [self.masterFrame.getSensingMid() - datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingMid(),
                          self.masterFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]
        elif self.baselineLocation.lower() == 'top':
            print('Estimating baselines at top of master image')
            masterTime = [self.masterFrame.getSensingStart(),
                          self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingStart() + datetime.timedelta(seconds=2.0)]
        elif self.baselineLocation.lower() == 'bottom':
            print('Estimating baselines at bottom of master image')
            masterTime = [self.masterFrame.getSensingStop() - datetime.timedelta(seconds=2.0),
                          self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0),
                          self.masterFrame.getSensingStop()]
        else:
            raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation))

        slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0),
                     self.slaveFrame.getSensingMid(),
                     self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)]
        # slaveTime = [self.slaveFrame.getSensingStart(), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingStop()]

        for i in range(3):
            # Calculate the baseline at the start of the scene, mid-scene, and the end of the scene
            # First, get the position and velocity at the start of the scene
            self.logger.info("Sampling time %s" % i)
            masterBasis = self.calculateBasis(self.masterOrbit, masterTime[i])
            normV = self.calculateScalarVelocity(self.masterOrbit, masterTime[i])

            # Calculate the distance moved since the last baseline point
            if (i > 0):
                deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0])
                s[i] = s[i-1] + deltaT*normV

            masterSV = self.masterOrbit.interpolateOrbit(masterTime[i], method='hermite')
            slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite')
            x1 = masterSV.getPosition()
            x2 = slaveSV.getPosition()
            (z_offset, v_offset, c_offset) = self.calculateBasisOffset(x1, x2, masterBasis)
            az_offset.append(z_offset)  # Save the position offset

            # Calculate a new start time
            relativeSlaveTime = slaveTime[i] - datetime.timedelta(seconds=(z_offset/normV))
            slaveSV = self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite')

            # Recalculate the offsets
            x2 = slaveSV.getPosition()
            (z_offset, v_offset, c_offset) = self.calculateBasisOffset(x1, x2, masterBasis)
            vb.append(v_offset)
            hb.append(c_offset)

            # Multiply the horizontal and vertical baseline components by the look angle vector
            csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1])
            asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0])

        # Calculating baseline
        crossTrackBaselinePolynomialCoefficients = self.polynomialFit(s, hb)
        verticalBaselinePolynomialCoefficients = self.polynomialFit(s, vb)

        h_rate = crossTrackBaselinePolynomialCoefficients[1]

        # Calculate the gross azimuth and range offsets
        azb_avg = (az_offset[0] + az_offset[-1])/2.0
        asb_avg = (asb[0] + asb[-1])/2.0
        az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize)
        r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize)

        # Populate class attributes
        self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0]
        self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1]
        self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2]
        self.vBaselineTop = verticalBaselinePolynomialCoefficients[0]
        self.vBaselineRate = verticalBaselinePolynomialCoefficients[1]
        self.vBaselineAcc = verticalBaselinePolynomialCoefficients[2]
        self.pBaselineTop = csb[0]
        self.pBaselineBottom = csb[-1]
        self.orbSlcAzimuthOffset = az_offset
        self.orbSlcRangeOffset = r_offset
        self.rangeOffset = self.startingRange1 - self.startingRange2

    # Calculate a quadratic fit to the baseline polynomial
    def polynomialFit(self, xRef, yRef):
        size = len(xRef)
        if not (len(xRef) == len(yRef)):
            print("Error. Expecting input vectors of same length.")
            raise Exception
        if not (size == 3):
            print("Error. Expecting input vectors of length 3.")
            raise Exception

        Y = [0]*size
        A = [0]*size
        M = [[0 for i in range(size)] for j in range(size)]
        for j in range(size):
            for i in range(size):
                M[j][i] = math.pow(xRef[j], i)
            Y[j] = yRef[j]

        MInv = MM.invertMatrix(M)
        for i in range(size):
            for j in range(size):
                A[i] += MInv[i][j]*Y[j]

        return A
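
    # polynomialFit() above solves the 3x3 Vandermonde system M A = Y with
    # M[j][i] = xRef[j]**i, so a fitted baseline component is evaluated as
    # B(s) ~= A[0] + A[1]*s + A[2]*s**2, where s is the along-track distance
    # accumulated in baseline(). As a quick check, yRef = [0.0, 1.0, 4.0]
    # sampled at xRef = [0.0, 1.0, 2.0] yields A = [0.0, 0.0, 1.0],
    # i.e. B(s) = s**2.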

    def setRangePixelSize(self, pixelSize):
        self.rangePixelSize = pixelSize
        return

    def setAzimuthPixelSize(self, pixelSize):
        self.azimuthPixelSize = pixelSize
        return

    def setHeight(self, var):
        self.height = float(var)
        return

    def setRadius(self, radius):
        self.radius = radius
        return

    def setMasterStartingRange(self, range):
        self.startingRange1 = range
        return

    def setSlaveStartingRange(self, range):
        self.startingRange2 = range
        return

    def getHBaselineTop(self):
        return self.hBaselineTop

    def getHBaselineRate(self):
        return self.hBaselineRate

    def getHBaselineAcc(self):
        return self.hBaselineAcc

    def getVBaselineTop(self):
        return self.vBaselineTop

    def getVBaselineRate(self):
        return self.vBaselineRate

    def getVBaselineAcc(self):
        return self.vBaselineAcc

    def getPBaselineTop(self):
        return self.pBaselineTop

    def getPBaselineBottom(self):
        return self.pBaselineBottom

    def getOrbSlcAzimuthOffset(self):
        return self.orbSlcAzimuthOffset

    def getOrbSlcRangeOffset(self):
        return self.orbSlcRangeOffset

    def getRangeOffset(self):
        return self.rangeOffset

    def getPhaseConst(self):
        return self.phaseConst

    def getLookAngle(self):
        return self.lookAngle

    def _timeDeltaToSeconds(self, td):
        return (td.microseconds + (td.seconds + td.days * 24.0 * 3600) * 10**6) / 10**6

    def addMasterFrame(self):
        frame = self._inputPorts.getPort(name='masterFrame').getObject()
        self.masterFrame = frame
        self.startingRange1 = frame.getStartingRange()
        prf = frame.getInstrument().getPulseRepetitionFrequency()
        self.rangePixelSize = frame.getInstrument().getRangePixelSize()
        self.masterOrbit = frame.getOrbit()
        midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite')
        self.azimuthPixelSize = midSV.getScalarVelocity()/prf
        try:
            ellipsoid = frame._ellipsoid  # UAVSAR frame creates ellipsoid with peg
            self.radius = ellipsoid.pegRadCur
            self.height = frame.platformHeight
        except:
            ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp()
            self.radius = ellipsoid.get_a()
            self.height = midSV.calculateHeight(ellipsoid)

    def addSlaveFrame(self):
        frame = self._inputPorts.getPort(name='slaveFrame').getObject()
        self.slaveFrame = frame
        self.startingRange2 = frame.getStartingRange()
        self.slaveOrbit = frame.getOrbit()

    def __init__(self, name=''):
        self.masterOrbit = None
        self.slaveOrbit = None
        self.masterFrame = None
        self.slaveFrame = None
        self.lookAngle = None
        self.rangePixelSize = None
        self.azimuthPixelSize = None
        self.height = None
        self.radius = None
        self.startingRange1 = None
        self.startingRange2 = None
        self.hBaselineTop = None
        self.hBaselineRate = None
        self.hBaselineAcc = None
        self.vBaselineTop = None
        self.vBaselineRate = None
        self.vBaselineAcc = None
        self.pBaselineTop = None
        self.pBaselineBottom = None
        self.orbSlcAzimuthOffset = None
        self.orbSlcRangeOffset = None
        self.rangeOffset = None
        self.phaseConst = -99999
        super(Baseline, self).__init__(family=self.__class__.family, name=name)
        self.logger = logging.getLogger('isce.mroipac.baseline')
        self.createPorts()

        # Satisfy the old Component
        self.dictionaryOfOutputVariables = {}
        self.dictionaryOfVariables = {}
        self.descriptionOfVariables = {}
        self.mandatoryVariables = []
        self.optionalVariables = []
        return None

    def createPorts(self):
        # Set input ports
        # It looks like we really need two orbits, a time, range and azimuth pixel sizes,
        # the two starting ranges, a planet, and the two prfs.
        # The frame ports provide the orbits, the range and azimuth pixel sizes,
        # the starting ranges, satellite heights and times for the first lines.
        masterFramePort = Port(name='masterFrame', method=self.addMasterFrame)
        slaveFramePort = Port(name='slaveFrame', method=self.addSlaveFrame)
        self._inputPorts.add(masterFramePort)
        self._inputPorts.add(slaveFramePort)
        return None

    def __str__(self):
        retstr = "Initial Baseline estimates \n"
        retstr += "Cross-track Baseline: %s\n"
        retlst = (self.hBaselineTop,)
        retstr += "Vertical Baseline: %s\n"
        retlst += (self.vBaselineTop,)
        retstr += "Perpendicular Baseline: %s\n"
        retlst += (self.pBaselineTop,)
        retstr += "Bulk Azimuth Offset: %s\n"
        retlst += (self.orbSlcAzimuthOffset,)
        retstr += "Bulk Range Offset: %s\n"
        retlst += (self.orbSlcRangeOffset,)
        return retstr % retlst
# See the License for the specific language governing permissions", "MM.normalizeVector(c) v = MM.crossProduct(c,r) # Calculate a the \"velocity\" component that is perpendicular", "looks like we really need two orbits, a time, range and azimuth pixel", "vertical baseline components by the look angle vector asb.append(-hb[i]*lookVector[1] - vb[i]*lookVector[0]) #Calculating baseline", "= crossTrackBaselinePolynomialCoefficients[1] self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop = verticalBaselinePolynomialCoefficients[0] self.vBaselineRate = verticalBaselinePolynomialCoefficients[1] self.vBaselineAcc =", "direction and position basis = BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v) basis.setCrossTrackVector(c) return basis # Given", "self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize = midSV.getScalarVelocity()/prf try: ellipsoid = frame._ellipsoid #UAVSAR frame creates ellipsoid", "r_offset = (self.startingRange1 - self.startingRange2 - asb_avg)/(self.rangePixelSize) # Populate class attributes self.hBaselineTop =", "unit vector c = MM.crossProduct(r,v) # Calculate the vector perpendicular to the platform", "frame def calculateLookAngle(self): lookVector = self.calculateLookVector() return math.degrees(math.atan2(lookVector[1],lookVector[0])) # Calculate the look vector", "either express or implied. # See the License for the specific language governing", "self.lookAngle def _timeDeltaToSeconds(self,td): return (td.microseconds + (td.seconds + td.days * 24.0 * 3600)", "given a master time and a slave time and a method that calls", "A[i] += MInv[i][j]*Y[j] return A def setRangePixelSize(self,pixelSize): self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize):", "satellite heights and times for the first lines masterFramePort = Port(name='masterFrame',method=self.addMasterFrame) slaveFramePort =", "MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline components between two frames def baseline(self):", "= self.calculateLookVector() az_offset = [] vb = [] hb = [] csb =", "# Calculate the difference between the master and slave position vectors z_offset =", "v_offset = MM.dotProduct(dx,basis.getPositionVector()) c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline components", "retlst += (self.orbSlcAzimuthOffset,) retstr += \"Bulk Range Offset: %s\\n\" retlst += (self.orbSlcRangeOffset,) return", "self.orbSlcAzimuthOffset = None self.orbSlcRangeOffset = None self.rangeOffset = None self.phaseConst = -99999 super(Baseline,", "i in range(3): # Calculate the Baseline at the start of the scene,", "the License. 
# You may obtain a copy of the License at #", "in range(size): for j in range(size): A[i] += MInv[i][j]*Y[j] return A def setRangePixelSize(self,pixelSize):", "self.masterFrame.getSensingStop()] else: raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation)) slaveTime = [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.slaveFrame.getSensingMid(),", "self._inputPorts.getPort(name='masterFrame').getObject() self.masterFrame = frame self.startingRange1 = frame.getStartingRange() prf = frame.getInstrument().getPulseRepetitionFrequency() self.rangePixelSize = frame.getInstrument().getRangePixelSize()", "NLR' # (No [Export] License Required except when exporting to an embargoed country,", "= frame.platformHeight except: ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp() self.radius = ellipsoid.get_a() self.height = midSV.calculateHeight(ellipsoid) def", "j in range(size): A[i] += MInv[i][j]*Y[j] return A def setRangePixelSize(self,pixelSize): self.rangePixelSize = pixelSize", "an embargoed country, # end user, or in support of a prohibited end", "self.startingRange2 = None self.hBaselineTop = None self.hBaselineRate = None self.hBaselineAcc = None self.vBaselineTop", "v = sv.getVelocity() r = MM.normalizeVector(x1) # Turn the position vector into a", "the range and azimuth pixel sizes, starting ranges, # satellite heights and times", "> 0): deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i] = s[i-1] + deltaT*normV masterSV", "self.startingRange2 = frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self, name=''): self.masterOrbit = None self.slaveOrbit", "# authority as may be required before exporting this software to any 'EAR99'", "the rate of baseline change over time. for port in self.inputPorts: port() lookVector", "name=name) self.logger = logging.getLogger('isce.mroipac.baseline') self.createPorts() # Satisfy the old Component self.dictionaryOfOutputVariables = {}", "(asb[0] + asb[-1])/2.0 az_offset = (-azb_avg - h_rate*self.startingRange1*lookVector[1])/(self.azimuthPixelSize) r_offset = (self.startingRange1 - self.startingRange2", "# end user, or in support of a prohibited end use). By downloading", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "Populate class attributes self.hBaselineTop = crossTrackBaselinePolynomialCoefficients[0] self.hBaselineRate = crossTrackBaselinePolynomialCoefficients[1] self.hBaselineAcc = crossTrackBaselinePolynomialCoefficients[2] self.vBaselineTop", "<NAME> #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ import math import datetime import logging from iscesys.Component.Component import Component, Port", "slaveSV = self.slaveOrbit.interpolateOrbit(slaveTime[i], method='hermite') x1 = masterSV.getPosition() x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis)", "the vector perpendicular to the platform position and velocity, this is the c,", "pixel sizes, starting ranges, # satellite heights and times for the first lines", "= None self.slaveOrbit = None self.masterFrame = None self.slaveFrame = None self.lookAngle =", "of length 3.\") raise Exception Y = [0]*size A = [0]*size M =", "# limitations under the License. # # United States Government Sponsorship acknowledged. 
This", "two starting ranges, a planet, and the two prfs # These provide the", "software to any 'EAR99' # embargoed foreign country or citizen of those countries.", "the old Component self.dictionaryOfOutputVariables = {} self.dictionaryOfVariables = {} self.descriptionOfVariables = {} self.mandatoryVariables", "= MM.invertMatrix(M) for i in range(size): for j in range(size): A[i] += MInv[i][j]*Y[j]", "in case there is a large shift between images.') ) class Baseline(Component): family", "basis.setVelocityVector(v) basis.setCrossTrackVector(c) return basis # Given two position vectors and a basis, calculate", "= MM.dotProduct(dx,basis.getPositionVector()) c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset # Calculate the baseline components between", "Technology. ALL RIGHTS RESERVED. # # Licensed under the Apache License, Version 2.0", "# Turn the velocity vector into a unit vector c = MM.crossProduct(r,v) #", "difference in position and the \"velocity\" component v_offset = MM.dotProduct(dx,basis.getPositionVector()) c_offset = MM.dotProduct(dx,basis.getCrossTrackVector())", "prf = frame.getInstrument().getPulseRepetitionFrequency() self.rangePixelSize = frame.getInstrument().getRangePixelSize() self.masterOrbit = frame.getOrbit() midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite')", "<gh_stars>1-10 #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ # Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED. #", "polynomial def polynomialFit(self,xRef,yRef): size = len(xRef) if not (len(xRef) == len(yRef)): print(\"Error. Expecting", "= ellipsoid.get_a() self.height = midSV.calculateHeight(ellipsoid) def addSlaveFrame(self): frame = self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame", "frames def baseline(self): #TODO This could be further refactored into a method that", "between images.') ) class Baseline(Component): family = 'baseline' logging_name = 'isce.mroipac.baseline' parameter_list =", "= frame.getStartingRange() self.slaveOrbit = frame.getOrbit() def __init__(self, name=''): self.masterOrbit = None self.slaveOrbit =", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "case there is a large shift between images.') ) class Baseline(Component): family =", "position and the \"velocity\" component v_offset = MM.dotProduct(dx,basis.getPositionVector()) c_offset = MM.dotProduct(dx,basis.getCrossTrackVector()) return z_offset,v_offset,c_offset", "velocity, this is the c, or cross-track vector c = MM.normalizeVector(c) v =", "self.mandatoryVariables = [] self.optionalVariables = [] return None def createPorts(self): # Set input", "math.sqrt(1 - cosl*cosl) return [cosl,sinl] # Calculate the scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time):", "'EAR99 NLR' # (No [Export] License Required except when exporting to an embargoed", "those countries. 
# # Author: <NAME> #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ import math import datetime import logging", "implies at bottom of master image, '+ '\"middle\" implies near middle of master", "on the spacecraft position def calculateBasis(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') x1 = sv.getPosition()", "calculateLookVector(self): try: z = self.masterFrame.terrainHeight except: z = 0.0 cosl = ((self.height-z)*(2*self.radius +", "# Save the position offset # Calculate a new start time relativeSlaveTime =", "def calculateBasis(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') x1 = sv.getPosition() v = sv.getVelocity() r", "i in range(size) ] for j in range(size)] for j in range(size): for", "verticalBaselinePolynomialCoefficients = self.polynomialFit(s,vb) h_rate = crossTrackBaselinePolynomialCoefficients[1] # Calculate the gross azimuth and range", "of those countries. # # Author: <NAME> #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ import math import datetime import", "= r_offset self.rangeOffset = self.startingRange1 - self.startingRange2 # Calculate a quadratic fit to", "self.orbSlcAzimuthOffset def getOrbSlcRangeOffset(self): return self.orbSlcRangeOffset def getRangeOffset(self): return self.rangeOffset def getPhaseConst(self): return self.phaseConst", "self.masterOrbit = frame.getOrbit() midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize = midSV.getScalarVelocity()/prf try: ellipsoid =", "the orbits # These provide the range and azimuth pixel sizes, starting ranges,", "exporting to an embargoed country, # end user, or in support of a", "for estimating baselines') masterTime = [self.masterFrame.getSensingStart(),self.masterFrame.getSensingMid(),self.masterFrame.getSensingStop()] elif self.baselineLocation.lower() == 'middle': print('Estimating baselines around", "a quadratic fit to the baseline polynomial def polynomialFit(self,xRef,yRef): size = len(xRef) if", "# Multiply the horizontal and vertical baseline components by the look angle vector", "BaselineBasis() basis.setPositionVector(r) basis.setVelocityVector(v) basis.setCrossTrackVector(c) return basis # Given two position vectors and a", "orthogonal basis for cross-track and velocity directions # based on the spacecraft position", "self.masterFrame.getSensingStop() - datetime.timedelta(seconds=1.0), self.masterFrame.getSensingStop()] else: raise Exception('Unknown baseline location: {0}'.format(self.baselineLocation)) slaveTime = [self.slaveFrame.getSensingMid()", "and a time, calculate an orthogonal basis for cross-track and velocity directions #", "% i) masterBasis = self.calculateBasis(self.masterOrbit,masterTime[i]) normV = self.calculateScalarVelocity(self.masterOrbit,masterTime[i]) # Calculate the distance moved", "+= (self.pBaselineTop,) retstr += \"Bulk Azimuth Offset: %s\\n\" retlst += (self.orbSlcAzimuthOffset,) retstr +=", "len(xRef) if not (len(xRef) == len(yRef)): print(\"Error. 
Expecting input vectors of same length.\")", "None self.startingRange2 = None self.hBaselineTop = None self.hBaselineRate = None self.hBaselineAcc = None", "if (i > 0): deltaT = self._timeDeltaToSeconds(masterTime[i] - masterTime[0]) s[i] = s[i-1] +", "in range(size): M[j][i] = math.pow(xRef[j],i) Y[j] = yRef[j] MInv = MM.invertMatrix(M) for i", "None self.pBaselineBottom = None self.orbSlcAzimuthOffset = None self.orbSlcRangeOffset = None self.rangeOffset = None", "'To be used in case there is a large shift between images.') )", "self.slaveOrbit.interpolateOrbit(relativeSlaveTime, method='hermite') # Recalculate the offsets x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset)", "except in compliance with the License. # You may obtain a copy of", "the offsets x2 = slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset) hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1])", "az_offset self.orbSlcRangeOffset = r_offset self.rangeOffset = self.startingRange1 - self.startingRange2 # Calculate a quadratic", "= logging.getLogger('isce.mroipac.baseline') self.createPorts() # Satisfy the old Component self.dictionaryOfOutputVariables = {} self.dictionaryOfVariables =", "return def getHBaselineTop(self): return self.hBaselineTop def getHBaselineRate(self): return self.hBaselineRate def getHBaselineAcc(self): return self.hBaselineAcc", "setMasterStartingRange(self,range): self.startingRange1 = range return def setSlaveStartingRange(self,range): self.startingRange2 = range return def getHBaselineTop(self):", "starting ranges, # satellite heights and times for the first lines masterFramePort =", "embargoed foreign country or citizen of those countries. # # Author: <NAME> #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~", "= [] self.optionalVariables = [] return None def createPorts(self): # Set input ports", "self.rangePixelSize = pixelSize return def setAzimuthPixelSize(self,pixelSize): self.azimuthPixelSize = pixelSize return def setHeight(self,var): self.height", "velocity directions # based on the spacecraft position def calculateBasis(self,orbit,time): sv = orbit.interpolateOrbit(time,", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "License. # # United States Government Sponsorship acknowledged. 
This software is subject to", "= Port(name='masterFrame',method=self.addMasterFrame) slaveFramePort = Port(name='slaveFrame',method=self.addSlaveFrame) self._inputPorts.add(masterFramePort) self._inputPorts.add(slaveFramePort) return None def __str__(self): retstr =", "+ td.days * 24.0 * 3600) * 10**6) / 10**6 def addMasterFrame(self): frame", "return self.hBaselineTop def getHBaselineRate(self): return self.hBaselineRate def getHBaselineAcc(self): return self.hBaselineAcc def getVBaselineTop(self): return", "= v def getVelocityVector(self): return self.x2 def setCrossTrackVector(self,c): self.x3 = c def getCrossTrackVector(self):", "time, calculate an orthogonal basis for cross-track and velocity directions # based on", "may be required before exporting this software to any 'EAR99' # embargoed foreign", "baselines class BaselineBasis(Basis): def __init__(self): Basis.__init__(self) def setPositionVector(self,x): self.x1 = x def getPositionVector(self):", "self.phaseConst def getLookAngle(self): return self.lookAngle def _timeDeltaToSeconds(self,td): return (td.microseconds + (td.seconds + td.days", "rate of baseline change over time. for port in self.inputPorts: port() lookVector =", "- self.startingRange2 # Calculate a quadratic fit to the baseline polynomial def polynomialFit(self,xRef,yRef):", "mandatory=False, doc = ('Location at which to compute baselines - \"all\" implies '+", "= [self.slaveFrame.getSensingMid() - datetime.timedelta(seconds=1.0), self.slaveFrame.getSensingMid(), self.slaveFrame.getSensingMid() + datetime.timedelta(seconds=1.0)] # slaveTime = [self.slaveFrame.getSensingStart(),self.slaveFrame.getSensingMid(),self.slaveFrame.getSensingStop()] for", "has the responsibility to obtain export licenses, or other export # authority as", "print('Using entire span of image for estimating baselines') masterTime = [self.masterFrame.getSensingStart(),self.masterFrame.getSensingMid(),self.masterFrame.getSensingStop()] elif self.baselineLocation.lower()", "orbit.interpolateOrbit(time, method='hermite') v = sv.getVelocity() normV = MM.norm(v) return normV # Given an", "[] csb = [] asb = [] s = [0.,0.,0.] 
if self.baselineLocation.lower() ==", "= [self.masterFrame.getSensingStart(),self.masterFrame.getSensingMid(),self.masterFrame.getSensingStop()] elif self.baselineLocation.lower() == 'middle': print('Estimating baselines around center of master image')", "# Calculate the scalar spacecraft velocity def calculateScalarVelocity(self,orbit,time): sv = orbit.interpolateOrbit(time, method='hermite') v", "in range(size) ] for j in range(size)] for j in range(size): for i", "offset between the two positions in this basis def calculateBasisOffset(self,x1,x2,basis): dx = [(x2[j]", "= sv.getVelocity() normV = MM.norm(v) return normV # Given an orbit and a", "frame.getInstrument().getPulseRepetitionFrequency() self.rangePixelSize = frame.getInstrument().getRangePixelSize() self.masterOrbit = frame.getOrbit() midSV = self.masterOrbit.interpolateOrbit(frame.getSensingMid(), method='hermite') self.azimuthPixelSize =", "Multiply the horizontal and vertical baseline components by the look angle vector asb.append(-hb[i]*lookVector[1]", "def addSlaveFrame(self): frame = self._inputPorts.getPort(name='slaveFrame').getObject() self.slaveFrame = frame self.startingRange2 = frame.getStartingRange() self.slaveOrbit =", "print('Estimating baselines at top of master image') masterTime = [self.masterFrame.getSensingStart(), self.masterFrame.getSensingStart() + datetime.timedelta(seconds=1.0),", "input vectors of length 3.\") raise Exception Y = [0]*size A = [0]*size", "%s\\n\" retlst += (self.vBaselineTop,) retstr += \"Perpendicular Baseline: %s\\n\" retlst += (self.pBaselineTop,) retstr", "to # U.S. export control laws and regulations and has been classified as", "not (len(xRef) == len(yRef)): print(\"Error. Expecting input vectors of same length.\") raise Exception", "= self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset) hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply the horizontal and vertical", "def setCrossTrackVector(self,c): self.x3 = c def getCrossTrackVector(self): return self.x3 BASELINE_LOCATION = Component.Parameter('baselineLocation', public_name", "slaveSV.getPosition() (z_offset,v_offset,c_offset) = self.calculateBasisOffset(x1,x2,masterBasis) vb.append(v_offset) hb.append(c_offset) csb.append(-hb[i]*lookVector[0] + vb[i]*lookVector[1]) # Multiply the horizontal" ]
[ "range(n): cards.append(self.draw()) return cards def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if Deck._FULL_DECK:", "cards.append(self.draw()) return cards def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if Deck._FULL_DECK: return", "seed the static deck with the list of unique card integers. Each object", "for i in range(n): cards.append(self.draw()) return cards def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def", "__init__(self): self.shuffle() def shuffle(self): # and then shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards) def", "__str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK) # create the", "def GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK) # create the standard 52 card deck", "def __init__(self): self.shuffle() def shuffle(self): # and then shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards)", "the standard 52 card deck for rank in Card.STR_RANKS: for suit, val in", "Card class Deck: \"\"\" Class representing a deck. The first time we create,", "== 1: return self.cards.pop(0) cards = [] for i in range(n): cards.append(self.draw()) return", "create the standard 52 card deck for rank in Card.STR_RANKS: for suit, val", "random import shuffle as rshuffle from .card import Card class Deck: \"\"\" Class", "return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK) # create the standard", "n=1): if n == 1: return self.cards.pop(0) cards = [] for i in", "# and then shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1): if n", "simply makes a copy of this object and shuffles it. \"\"\" _FULL_DECK =", "= Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1): if n == 1: return self.cards.pop(0) cards", "of unique card integers. Each object instantiated simply makes a copy of this", "card integers. Each object instantiated simply makes a copy of this object and", "if Deck._FULL_DECK: return list(Deck._FULL_DECK) # create the standard 52 card deck for rank", "cards = [] for i in range(n): cards.append(self.draw()) return cards def __str__(self): return", "\"\"\" Class representing a deck. The first time we create, we seed the", "draw(self, n=1): if n == 1: return self.cards.pop(0) cards = [] for i", "shuffles it. \"\"\" _FULL_DECK = [] def __init__(self): self.shuffle() def shuffle(self): # and", "Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK) # create the standard 52", "from .card import Card class Deck: \"\"\" Class representing a deck. The first", "\"\"\" _FULL_DECK = [] def __init__(self): self.shuffle() def shuffle(self): # and then shuffle", "shuffle(self): # and then shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1): if", "we seed the static deck with the list of unique card integers. Each", "with the list of unique card integers. Each object instantiated simply makes a", "list of unique card integers. Each object instantiated simply makes a copy of", "the static deck with the list of unique card integers. 
Each object instantiated", "for rank in Card.STR_RANKS: for suit, val in Card.CHAR_SUIT_TO_INT_SUIT.items(): Deck._FULL_DECK.append(Card.new(rank + suit)) return", "shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1): if n == 1: return", "self.cards.pop(0) cards = [] for i in range(n): cards.append(self.draw()) return cards def __str__(self):", "the list of unique card integers. Each object instantiated simply makes a copy", "static deck with the list of unique card integers. Each object instantiated simply", "GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK) # create the standard 52 card deck for", "<filename>src/modules/deuces/deck.py from random import shuffle as rshuffle from .card import Card class Deck:", "first time we create, we seed the static deck with the list of", "rshuffle from .card import Card class Deck: \"\"\" Class representing a deck. The", "rshuffle(self.cards) def draw(self, n=1): if n == 1: return self.cards.pop(0) cards = []", "@staticmethod def GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK) # create the standard 52 card", "and shuffles it. \"\"\" _FULL_DECK = [] def __init__(self): self.shuffle() def shuffle(self): #", "list(Deck._FULL_DECK) # create the standard 52 card deck for rank in Card.STR_RANKS: for", "card deck for rank in Card.STR_RANKS: for suit, val in Card.CHAR_SUIT_TO_INT_SUIT.items(): Deck._FULL_DECK.append(Card.new(rank +", "object and shuffles it. \"\"\" _FULL_DECK = [] def __init__(self): self.shuffle() def shuffle(self):", "return cards def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK)", "integers. Each object instantiated simply makes a copy of this object and shuffles", ".card import Card class Deck: \"\"\" Class representing a deck. The first time", "copy of this object and shuffles it. \"\"\" _FULL_DECK = [] def __init__(self):", "= [] for i in range(n): cards.append(self.draw()) return cards def __str__(self): return Card.print_pretty_cards(self.cards)", "import shuffle as rshuffle from .card import Card class Deck: \"\"\" Class representing", "then shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1): if n == 1:", "n == 1: return self.cards.pop(0) cards = [] for i in range(n): cards.append(self.draw())", "if n == 1: return self.cards.pop(0) cards = [] for i in range(n):", "time we create, we seed the static deck with the list of unique", "= [] def __init__(self): self.shuffle() def shuffle(self): # and then shuffle self.cards =", "of this object and shuffles it. \"\"\" _FULL_DECK = [] def __init__(self): self.shuffle()", "cards def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK) #", "unique card integers. 
Each object instantiated simply makes a copy of this object", "[] for i in range(n): cards.append(self.draw()) return cards def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod", "we create, we seed the static deck with the list of unique card", "i in range(n): cards.append(self.draw()) return cards def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck():", "_FULL_DECK = [] def __init__(self): self.shuffle() def shuffle(self): # and then shuffle self.cards", "1: return self.cards.pop(0) cards = [] for i in range(n): cards.append(self.draw()) return cards", "create, we seed the static deck with the list of unique card integers.", "[] def __init__(self): self.shuffle() def shuffle(self): # and then shuffle self.cards = Deck.GetFullDeck()", "and then shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1): if n ==", "return list(Deck._FULL_DECK) # create the standard 52 card deck for rank in Card.STR_RANKS:", "a deck. The first time we create, we seed the static deck with", "self.cards = Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1): if n == 1: return self.cards.pop(0)", "instantiated simply makes a copy of this object and shuffles it. \"\"\" _FULL_DECK", "self.shuffle() def shuffle(self): # and then shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards) def draw(self,", "# create the standard 52 card deck for rank in Card.STR_RANKS: for suit,", "Class representing a deck. The first time we create, we seed the static", "return self.cards.pop(0) cards = [] for i in range(n): cards.append(self.draw()) return cards def", "it. \"\"\" _FULL_DECK = [] def __init__(self): self.shuffle() def shuffle(self): # and then", "def draw(self, n=1): if n == 1: return self.cards.pop(0) cards = [] for", "Deck._FULL_DECK: return list(Deck._FULL_DECK) # create the standard 52 card deck for rank in", "representing a deck. The first time we create, we seed the static deck", "Each object instantiated simply makes a copy of this object and shuffles it.", "Deck: \"\"\" Class representing a deck. The first time we create, we seed", "deck. The first time we create, we seed the static deck with the", "shuffle as rshuffle from .card import Card class Deck: \"\"\" Class representing a", "a copy of this object and shuffles it. \"\"\" _FULL_DECK = [] def", "in range(n): cards.append(self.draw()) return cards def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if", "def __str__(self): return Card.print_pretty_cards(self.cards) @staticmethod def GetFullDeck(): if Deck._FULL_DECK: return list(Deck._FULL_DECK) # create", "object instantiated simply makes a copy of this object and shuffles it. \"\"\"", "from random import shuffle as rshuffle from .card import Card class Deck: \"\"\"", "Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1): if n == 1: return self.cards.pop(0) cards =", "this object and shuffles it. \"\"\" _FULL_DECK = [] def __init__(self): self.shuffle() def", "standard 52 card deck for rank in Card.STR_RANKS: for suit, val in Card.CHAR_SUIT_TO_INT_SUIT.items():", "The first time we create, we seed the static deck with the list", "as rshuffle from .card import Card class Deck: \"\"\" Class representing a deck.", "class Deck: \"\"\" Class representing a deck. 
The first time we create, we", "52 card deck for rank in Card.STR_RANKS: for suit, val in Card.CHAR_SUIT_TO_INT_SUIT.items(): Deck._FULL_DECK.append(Card.new(rank", "def shuffle(self): # and then shuffle self.cards = Deck.GetFullDeck() rshuffle(self.cards) def draw(self, n=1):", "import Card class Deck: \"\"\" Class representing a deck. The first time we", "deck with the list of unique card integers. Each object instantiated simply makes", "deck for rank in Card.STR_RANKS: for suit, val in Card.CHAR_SUIT_TO_INT_SUIT.items(): Deck._FULL_DECK.append(Card.new(rank + suit))", "rank in Card.STR_RANKS: for suit, val in Card.CHAR_SUIT_TO_INT_SUIT.items(): Deck._FULL_DECK.append(Card.new(rank + suit)) return list(Deck._FULL_DECK)", "makes a copy of this object and shuffles it. \"\"\" _FULL_DECK = []" ]
[ "the current scopeset if we do not already have on. :rtype: string or", "list): raise ValueError('Scopes must be a list') token = self._get_token_with_scopes(scopes) if token: #", "by # the user! Also, even if it has expired, we might still", "due to a server error or because the account or token was lacking", "if not os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating file') with open(self._cachefile,", "that we will try to use for our webserver WEB_PORTS = [12345, 23456]", "that whenever this file is touched, the cache lock is held \"\"\" assert", "the account or token was lacking specific permissions. \"\"\" if not self.last_returned_uuid: raise", "token to be removed from cache \"\"\" self.debug('Removing token %s from cache', uuid)", "start a new webserver on one of the WEB_PORTS, and then either show", "None # We did not have a valid token, now comes the hard", "import requests import sys from openidc_client import release # The ports that we", "to use POST submission of client secrets rather than Authorization header :kwarg client_id:", "client. :kwarg new_token: If True, we will actively request the user to get", "data['client_secret'] = self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() self._retrieved_code = None", "was valid but your request failed due to a server error or because", "UUID. If the user cancelled (or we got another error), we will return", ":returns: True if the token was succesfully refreshed, False otherwise \"\"\" oldtoken =", "secret. :kwarg useragent: Useragent string to use. If not provided, defaults to \"python-openidc-client/VERSION\"", "(string, dict) or None :returns: Token UUID and contents or None if no", "not new_token: return None # We did not have a valid token, now", "if we do not already have on. :kwarg auto_refresh: If False, will not", "try: from urllib import urlencode except ImportError: from urllib.parse import urlencode from uuid", "your request failed due to a server error or because the account or", "kv = dict([v.split('=', 1) for v in split]) if 'error' in kv: self.debug('Error", "if we already have a token with the current app_identifier that has the", "in incorrect cases. :kwargs http_method: The HTTP method to use, defaults to POST..", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR", "import json import logging from threading import Lock import time try: from StringIO", "not in ckwargs: ckwargs['data'] = {} ckwargs['data']['access_token'] = token else: if 'headers' not", "anyone.\"\"\" with self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes self._cache to cache on disk. Requires", "even if it has expired, we might still be able to # refresh", "% method) if 'data' not in ckwargs: ckwargs['data'] = {} ckwargs['data']['access_token'] = token", "with the specified scopes. The webserver will then need to catch the return", "reported an issue before. Let's just remove this token. 
self._delete_token(self.last_returned_uuid) return None refresh_result", "to catch the return with either an Authorization Code (that we will exchange", "method in self.idp_mapping: return self.idp + self.idp_mapping[method] else: return ValueError('Idp Mapping did not", "self.token_to_try self.token_to_try = None else: token = self.get_token(scopes, new_token=new_token) if not token: return", "unable to instantiate a webserver') return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1] rquery = {}", ":rtype: bool :returns: True if the token was succesfully refreshed, False otherwise \"\"\"", "return code in incorrect cases. :kwargs http_method: The HTTP method to use, defaults", "auto_refresh: return resp self.token_to_try = self.report_token_issue() if not self.token_to_try: return resp return self.send_request(*args,", "{'access_token': resp['access_token'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + int(resp['expires_in']), 'idp': self.idp, 'token_type': resp['token_type'], 'scopes':", "else: self._retrieved_code = kv['code'] # Just return a message start_response('200 OK', [('Content-Type', 'text/plain')])", "'code': self._retrieved_code} if self.client_secret: data['client_secret'] = self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data)", "token we can use to refresh the access token # scopes: A list", "%s', resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'], 'token_type': resp['token_type'], 'refresh_token': resp['refresh_token'], 'expires_at':", "renew the token or delete the token. :kwarg scopes: A list of scopes", "dict. :rtype: string :returns: The IdP URL \"\"\" if method in self.idp_mapping: return", "'%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s to grant authorization' % authz_url, file=self._printfd)", "token cache is a json serialized dict # This dict contains uuid: token", "_refresh_token(self, uuid): \"\"\"Tries to refresh a token and put the refreshed token in", "is True. :kwarg new_token: If True, we will actively request the user to", "self._retrieved_code is not None if self._retrieved_code is False: # The user cancelled the", "to permit persons to whom the Software is # furnished to do so,", "except for `scopes`, `new_token` and `auto_refresh` keyword arguments. `scopes` is required. :kwarg scopes:", "_cachefile(self): \"\"\"Property to get the cache file name for the current client. This", "= (uuid, token) if possible_token: self.debug('Returning possible token') return possible_token def _idp_url(self, method):", "'.join(scopes) rquery['response_type'] = 'code' rquery['client_id'] = self.client_id rquery['redirect_uri'] = return_uri rquery['response_mode'] = 'query'", "from the cache and writes cache to disk. cache_lock may not be held", "self.debug('Updating token %s in cache, fields %s', uuid, toupdate.keys()) with self._cache_lock: self.__refresh_cache() if", "Allarguments and keyword arguments are like the arguments to requests, except for `scopes`,", "= [12345, 23456] class OpenIDCClient(object): # Internal implementation of tokens: # Every app", "== 401: # We got a 401 and this is a retry. Report", "We got a 401 and this is a retry. Report error self.report_token_issue() return", "and this permission notice shall be included in # all copies or substantial", "= False return self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report an error with the last token", "client. 
This assures that whenever this file is touched, the cache lock is", "with self._cache_lock: self.__refresh_cache() if uuid not in self._cache: return None self._cache[uuid].update(toupdate) self.__write_cache() return", "to get token with scopes %s', scopes) for uuid in self._cache: self.debug('Checking %s',", "\"\"\"Function to retrieve tokens with specific scopes. This function will block until a", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "as f: self._cache = json.loads(f.read()) self.debug('Loaded %i tokens', len(self._cache)) def _refresh_cache(self): \"\"\"Refreshes the", "possible_token = None self.debug('Trying to get token with scopes %s', scopes) for uuid", "uuid, token # This is a token that may or may not still", ":returns: String bearer token if possible or None \"\"\" if not isinstance(scopes, list):", "resp return self.send_request(*args, **kwargs) elif resp.status_code == 401: # We got a 401", "in self._cache The caller is responsible for either removing the token if it", "an Authorization Code (that we will exchange for an access token) or the", "WEB_PORT.\"\"\" for port in WEB_PORTS: try: server = simple_server.make_server('0.0.0.0', port, app) return server", "self.__write_cache() else: self.debug('Token was already gone') def _get_token_with_scopes(self, scopes): \"\"\"Searches the cache for", "__refresh_cache(self): \"\"\"Refreshes the self._cache from the cache on disk. Requires cache_lock to be", "server.handle_request() server.server_close() assert self._retrieved_code is not None if self._retrieved_code is False: # The", "report issue before requesting token') if self.problem_reported: # We were reported an issue", "from cache', uuid) with self._cache_lock: self.__refresh_cache() if uuid in self._cache: self.debug('Removing token') del", "to automatically report token issues on 401. This helps with broken apps that", "self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes self._cache to cache on disk. Requires cache_lock to", "token = self.get_token(scopes, new_token=new_token) if not token: return None if self.use_post: if 'json'", "the client_id. May be None if your IdP does not require you to", ":kwarg use_post: Whether to use POST submission of client secrets rather than Authorization", "WEB_PORTS: try: server = simple_server.make_server('0.0.0.0', port, app) return server except socket.error: # This", "*args, **kwargs): \"\"\"Make an python-requests POST request. Allarguments and keyword arguments are like", "resp: self.debug('Unable to refresh, error: %s', resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'],", "`new_token` and `auto_refresh` keyword arguments. `scopes` is required. :kwarg scopes: Scopes required for", "to \"python-openidc-client/VERSION\" :kwarg cachedir: The directory in which to store the token caches.", "disk. cache_lock may not be held by anyone. :param uuid: UUID of the", "return True def _get_server(self, app): \"\"\"This function returns a SimpleServer with an available", "a token to the cache and writes cache to disk. cache_lock may not", "lacking specific permissions. 
\"\"\" if not self.last_returned_uuid: raise Exception('Cannot report issue before requesting", "def _refresh_token(self, uuid): \"\"\"Tries to refresh a token and put the refreshed token", "new webserver on one of the WEB_PORTS, and then either show the user", "cache_lock may not be held by anyone.\"\"\" with self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes", "be refreshable. :param scopes: List of scopes that need to be in the", "def __write_cache(self): \"\"\"Wirtes self._cache to cache on disk. Requires cache_lock to be held", "no applicable tokens were found \"\"\" possible_token = None self.debug('Trying to get token", "or '~/.openidc') self.last_returned_uuid = None self.problem_reported = False self.token_to_try = None self._retrieved_code =", "ckwargs = copy(kwargs) scopes = ckwargs.pop('scopes') new_token = ckwargs.pop('new_token', True) auto_refresh = ckwargs.pop('auto_refresh',", "one continue def _get_new_token(self, scopes): \"\"\"This function kicks off some magic. We will", "of scopes that we had requested with the token def __init__(self, app_identifier, id_provider,", "the required scopes, we will return it. This function will return a bearer", "return self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report an error with the last token that was", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "the cache on disk. cache_lock may not be held by anyone.\"\"\" with self._cache_lock:", "if renewal was succesful. :param uuid: The UUID of the cached token to", "the requested method. :param method: The method name in the IdP mapping dict.", "your application that the token you sent was invalid, you should call it.", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "it. You should explicitly NOT call this function if the token was valid", "app_identifier, id_provider, id_provider_mapping, client_id, client_secret=None, use_post=False, useragent=None, cachedir=None, printfd=sys.stdout): \"\"\"Client for interacting with", "no such tokens exist it will return the possibly expired token: it might", "refreshed, False otherwise \"\"\" oldtoken = self._cache[uuid] self.debug('Refreshing token %s', uuid) data =", "Default is ~/.openidc. If this does not exist and we are unable to", "continue if not set(scopes).issubset(set(token['scopes'])): self.debug('Missing scope: %s not subset of %s', set(scopes), set(token['scopes']))", "will be thrown. :kwargs printfd: The File object to print token instructions to.", "(%s)', kv['error'], kv.get('error_description')) self._retrieved_code = False else: self._retrieved_code = kv['code'] # Just return", "already have on. :rtype: string or None :returns: String bearer token if possible", "self._cache_lock.locked() self.debug('Refreshing cache') if not os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating", "uuid) data = {'client_id': self.client_id, 'grant_type': 'refresh_token', 'refresh_token': oldtoken['refresh_token']} if self.client_secret: data['client_secret'] =", "def __init__(self, app_identifier, id_provider, id_provider_mapping, client_id, client_secret=None, use_post=False, useragent=None, cachedir=None, printfd=sys.stdout): \"\"\"Client for", "access token. 
If it did not work, we will return None and remove", "self._printfd = printfd def get_token(self, scopes, new_token=True): \"\"\"Function to retrieve tokens with specific", "_token_app(environ, start_response): query = environ['QUERY_STRING'] split = query.split('&') kv = dict([v.split('=', 1) for", "resp['token_type'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + resp['expires_in']}) self.debug('Refreshed until %s', self._cache[uuid]['expires_at']) return True", "the new access token. If it did not work, we will return None", "indicates the token should still be valid, it may have been revoked by", "the token should still be valid, it may have been revoked by #", "\"\"\" ckwargs = copy(kwargs) scopes = ckwargs.pop('scopes') new_token = ckwargs.pop('new_token', True) auto_refresh =", "retrieved tokens :param id_provider: URL of the identity provider to get tokens from", "writes cache to disk. cache_lock may not be held by anyone. :param uuid:", "in ['POST']: raise ValueError('Cannot use POST tokens in %s method' % method) if", "self.app_id) def __refresh_cache(self): \"\"\"Refreshes the self._cache from the cache on disk. Requires cache_lock", "you sent was invalid, you should call it. You should explicitly NOT call", "a json dict with the following keys: # idp: The URL of the", "'POST') is_retry = False if self.token_to_try: is_retry = True token = self.token_to_try self.token_to_try", "cache, fields %s', uuid, toupdate.keys()) with self._cache_lock: self.__refresh_cache() if uuid not in self._cache:", "token in self._cache The caller is responsible for either removing the token if", "token %s in cache, fields %s', uuid, toupdate.keys()) with self._cache_lock: self.__refresh_cache() if uuid", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "from urllib.parse import urlencode from uuid import uuid4 as uuidgen import webbrowser from", "found \"\"\" possible_token = None self.debug('Trying to get token with scopes %s', scopes)", "but if no such tokens exist it will return the possibly expired token:", "The caller is responsible for either removing the token if it could not", "Token UUID and contents or None if no applicable tokens were found \"\"\"", "NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "the return with either an Authorization Code (that we will exchange for an", "software and associated documentation files (the \"Software\"), to deal # in the Software", "URL of the idp that issued the token # sub: The subject that", "for `scopes`, `new_token` and `auto_refresh` keyword arguments. `scopes` is required. :kwarg scopes: Scopes", "resp self.token_to_try = self.report_token_issue() if not self.token_to_try: return resp return self.send_request(*args, **kwargs) elif", "self._retrieved_code = None resp = resp.json() if 'error' in resp: self.debug('Error exchanging authorization", "implementation of tokens: # Every app id has its own token cache #", "to # refresh the token. # refresh_token: The token we can use to", "to the cache and writes cache to disk. cache_lock may not be held", "and to permit persons to whom the Software is # furnished to do", "kv.get('error_description')) self._retrieved_code = False else: self._retrieved_code = kv['code'] # Just return a message", "a new token with the specified scopes. 
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016, 2017 Red Hat, Inc.
# Red Hat Author: <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Client for applications relying on OpenID Connect for authentication."""
from __future__ import print_function
from copy import copy
import json
import logging
from threading import Lock
import time
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
import socket
import os
try:
    from urllib import urlencode
except ImportError:
    from urllib.parse import urlencode
from uuid import uuid4 as uuidgen
import webbrowser
from wsgiref import simple_server

import requests
import sys

from openidc_client import release

# The ports that we will try to use for our webserver
WEB_PORTS = [12345, 23456]


class OpenIDCClient(object):
    # Internal implementation of tokens:
    #  Every app id has its own token cache
    #  The token cache is a json serialized dict
    #  This dict contains uuid: token pairs
    #  Every "token" object is a json dict with the following keys:
    #   idp: The URL of the idp that issued the token
    #   sub: The subject that owns the token
    #   access_token: Token value
    #   token_type: Token type. Currently supported: "Bearer"
    #   expires_at: Token expiration UTC time. NOTE: Even if the expires_at
    #    indicates the token should still be valid, it may have been revoked
    #    by the user! Also, even if it has expired, we might still be able to
    #    refresh the token.
    #   refresh_token: The token we can use to refresh the access token
    #   scopes: A list of scopes that we had requested with the token

    def __init__(self, app_identifier, id_provider, id_provider_mapping,
                 client_id, client_secret=None, use_post=False, useragent=None,
                 cachedir=None, printfd=sys.stdout):
        """Client for interacting with web services relying on OpenID Connect.

        :param app_identifier: Identifier for storage of retrieved tokens
        :param id_provider: URL of the identity provider to get tokens from
        :param id_provider_mapping: Mapping with URLs to use for specific
            endpoints on the IdP.
        :kwarg use_post: Whether to use POST submission of client secrets
            rather than the Authorization header
        :kwarg client_id: The Client Identifier used to request credentials
        :kwarg client_secret: The client "secret" that goes with the
            client_id. May be None if your IdP does not require you to use a
            secret.
        :kwarg useragent: Useragent string to use. If not provided, defaults
            to "python-openidc-client/VERSION"
        :kwarg cachedir: The directory in which to store the token caches.
            Will be put through expanduser. Default is ~/.openidc. If this
            does not exist and we are unable to create it, the OSError will be
            thrown.
        :kwarg printfd: The File object to print token instructions to.
        """
        self.logger = logging.getLogger(__name__)
        self.debug = self.logger.debug

        self.app_id = app_identifier
        self.use_post = use_post
        self.idp = id_provider
        self.idp_mapping = id_provider_mapping
        self.client_id = client_id
        self.client_secret = client_secret
        self.useragent = useragent or 'python-openid-client/%s' % \
            release.VERSION
        self.cachedir = os.path.expanduser(cachedir or '~/.openidc')
        self.last_returned_uuid = None
        self.problem_reported = False
        self.token_to_try = None
        self._retrieved_code = None
        # TODO: Make cache_lock a filesystem lock so we also lock across
        # multiple invocations
        self._cache_lock = Lock()
        with self._cache_lock:
            self.__refresh_cache()
        self._valid_cache = []
        self._printfd = printfd

    def get_token(self, scopes, new_token=True):
        """Function to retrieve tokens with specific scopes.

        This function will block until a token is retrieved if requested.
        It is always safe to call this though, since if we already have a
        token with the current app_identifier that has the required scopes,
        we will return it.

        This function will return a bearer token or None.
        Note that the bearer token might have been revoked by the user or
        expired.
        In that case, you will want to call report_token_issue() to try to
        renew the token or delete the token.

        :kwarg scopes: A list of scopes required for the current client.
        :kwarg new_token: If True, we will actively request the user to get a
            new token with the current scopeset if we do not already have one.
        :rtype: string or None
        :returns: String bearer token if possible or None
        """
        if not isinstance(scopes, list):
            raise ValueError('Scopes must be a list')
        token = self._get_token_with_scopes(scopes)
        if token:
            # If we had a valid token, use that
            self.last_returned_uuid = token[0]
            self.problem_reported = False
            return token[1]['access_token']
        elif not new_token:
            return None

        # We did not have a valid token, now comes the hard part...
        uuid = self._get_new_token(scopes)
        if uuid:
            self.last_returned_uuid = uuid
            self.problem_reported = False
            return self._cache[uuid]['access_token']

    def report_token_issue(self):
        """Report an error with the last token that was returned.

        This will attempt to renew the token that was last returned.
        If that worked, we will return the new access token.
        If it did not work, we will return None and remove this token from
        the cache.

        If you get an indication from your application that the token you
        sent was invalid, you should call it.
        You should explicitly NOT call this function if the token was valid
        but your request failed due to a server error or because the account
        or token was lacking specific permissions.
        """
        if not self.last_returned_uuid:
            raise Exception('Cannot report issue before requesting token')

        if self.problem_reported:
            # An issue was already reported before. Let's just remove this
            # token.
            self._delete_token(self.last_returned_uuid)
            return None

        refresh_result = self._refresh_token(self.last_returned_uuid)
        if not refresh_result:
            self._delete_token(self.last_returned_uuid)
            return None
        else:
            self.problem_reported = True
            return self._cache[self.last_returned_uuid]['access_token']

    def send_request(self, *args, **kwargs):
        """Make a python-requests POST request.

        All arguments and keyword arguments are like the arguments to
        requests, except for the `scopes`, `new_token` and `auto_refresh`
        keyword arguments. `scopes` is required.

        :kwarg scopes: Scopes required for this call. If a token with these
            scopes is not already cached, a new one will be requested unless
            new_token is False.
        :kwarg new_token: If True, we will actively request the user to get a
            new token with the current scopeset if we do not already have one.
        :kwarg auto_refresh: If False, will not try to automatically report
            token issues on 401. This helps with broken apps that may send a
            401 return code in incorrect cases.
        :kwarg http_method: The HTTP method to use, defaults to POST.
        """
        ckwargs = copy(kwargs)

        scopes = ckwargs.pop('scopes')
        new_token = ckwargs.pop('new_token', True)
        auto_refresh = ckwargs.pop('auto_refresh', True)
        method = ckwargs.pop('http_method', 'POST')

        is_retry = False
        if self.token_to_try:
            is_retry = True
            token = self.token_to_try
            self.token_to_try = None
        else:
            token = self.get_token(scopes, new_token=new_token)
            if not token:
                return None

        if self.use_post:
            if 'json' in ckwargs:
                raise ValueError('Cannot provide json in a post call')
            if method not in ['POST']:
                raise ValueError('Cannot use POST tokens in %s method'
                                 % method)

            if 'data' not in ckwargs:
                ckwargs['data'] = {}
            ckwargs['data']['access_token'] = token
        else:
            if 'headers' not in ckwargs:
                ckwargs['headers'] = {}
            ckwargs['headers']['Authorization'] = 'Bearer %s' % token

        resp = requests.request(method, *args, **ckwargs)
        if resp.status_code == 401 and not is_retry:
            if not auto_refresh:
                return resp

            self.token_to_try = self.report_token_issue()
            if not self.token_to_try:
                return resp
            return self.send_request(*args, **kwargs)
        elif resp.status_code == 401:
            # We got a 401 and this is a retry. Report error
            self.report_token_issue()
            return resp
        else:
            return resp

    @property
    def _cachefile(self):
        """Property to get the cache file name for the current client.

        This assures that whenever this file is touched, the cache lock is
        held.
        """
        assert self._cache_lock.locked()
        return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id)

    def __refresh_cache(self):
        """Refreshes the self._cache from the cache on disk.

        Requires cache_lock to be held by caller."""
        assert self._cache_lock.locked()
        self.debug('Refreshing cache')
        if not os.path.isdir(self.cachedir):
            self.debug('Creating directory')
            os.makedirs(self.cachedir)
        if not os.path.exists(self._cachefile):
            self.debug('Creating file')
            with open(self._cachefile, 'w') as f:
                f.write(json.dumps({}))
        with open(self._cachefile, 'r') as f:
            self._cache = json.loads(f.read())
        self.debug('Loaded %i tokens', len(self._cache))

    def _refresh_cache(self):
        """Refreshes the self._cache from the cache on disk.

        cache_lock may not be held by anyone."""
        with self._cache_lock:
            self.__refresh_cache()

    def __write_cache(self):
        """Writes self._cache to cache on disk.

        Requires cache_lock to be held by caller."""
        assert self._cache_lock.locked()
        self.debug('Writing cache with %i tokens', len(self._cache))
        with open(self._cachefile, 'w') as f:
            f.write(json.dumps(self._cache))

    def _add_token(self, token):
        """Adds a token to the cache and writes cache to disk.

        cache_lock may not be held by anyone.

        :param token: Dict of the token to be added to the cache
        """
        uuid = uuidgen().hex
        self.debug('Adding token %s to cache', uuid)
        with self._cache_lock:
            self.__refresh_cache()
            self._cache[uuid] = token
            self.__write_cache()
        return uuid

    def _update_token(self, uuid, toupdate):
        """Updates a token in the cache.

        cache_lock may not be held by anyone.

        :param uuid: UUID of the token to be updated
        :param toupdate: Dict indicating which fields need to be updated
        """
        self.debug('Updating token %s in cache, fields %s',
                   uuid, toupdate.keys())
        with self._cache_lock:
            self.__refresh_cache()
            if uuid not in self._cache:
                return None
            self._cache[uuid].update(toupdate)
            self.__write_cache()
        return uuid

    def _delete_token(self, uuid):
        """Removes a token from the cache and writes cache to disk.

        cache_lock may not be held by anyone.

        :param uuid: UUID of the token to be removed from cache
        """
        self.debug('Removing token %s from cache', uuid)
        with self._cache_lock:
            self.__refresh_cache()
            if uuid in self._cache:
                self.debug('Removing token')
                del self._cache[uuid]
                self.__write_cache()
            else:
                self.debug('Token was already gone')

    def _get_token_with_scopes(self, scopes):
        """Searches the cache for any tokens that have the requested scopes.

        It will prefer to return tokens whose expires_at is still before the
        current time, but if no such tokens exist it will return the possibly
        expired token: it might be refreshable.

        :param scopes: List of scopes that need to be in the returned token
        :rtype: (string, dict) or None
        :returns: Token UUID and contents or None if no applicable tokens
            were found
        """
        possible_token = None
        self.debug('Trying to get token with scopes %s', scopes)
        for uuid in self._cache:
            self.debug('Checking %s', uuid)
            token = self._cache[uuid]
            if token['idp'] != self.idp:
                self.debug('Incorrect idp')
                continue
            if not set(scopes).issubset(set(token['scopes'])):
                self.debug('Missing scope: %s not subset of %s',
                           set(scopes), set(token['scopes']))
                continue
            if token['expires_at'] < time.time():
                # This is a token that's supposed to still be valid, prefer it
                # over any others we have
                self.debug('Not yet expired, returning')
                return uuid, token
            # This is a token that may or may not still be valid
            self.debug('Possible')
            possible_token = (uuid, token)
        if possible_token:
            self.debug('Returning possible token')
            return possible_token

    def _idp_url(self, method):
        """Returns the IdP URL for the requested method.

        :param method: The method name in the IdP mapping dict.
        :rtype: string
        :returns: The IdP URL
        """
        if method in self.idp_mapping:
            return self.idp + self.idp_mapping[method]
        else:
            return ValueError('Idp Mapping did not include path for %s'
                              % method)

    def _refresh_token(self, uuid):
        """Tries to refresh a token and put the refreshed token in self._cache

        The caller is responsible for either removing the token if it could
        not be refreshed or saving the cache if renewal was successful.

        :param uuid: The UUID of the cached token to attempt to refresh.
        :rtype: bool
        :returns: True if the token was successfully refreshed, False
            otherwise
        """
        oldtoken = self._cache[uuid]
        self.debug('Refreshing token %s', uuid)
        data = {'client_id': self.client_id,
                'grant_type': 'refresh_token',
                'refresh_token': oldtoken['refresh_token']}
        if self.client_secret:
            data['client_secret'] = self.client_secret
        resp = requests.request(
            'POST',
            self._idp_url('Token'),
            data=data)
        resp.raise_for_status()
        resp = resp.json()
        if 'error' in resp:
            self.debug('Unable to refresh, error: %s', resp['error'])
            return False
        self._update_token(
            uuid,
            {'access_token': resp['access_token'],
             'token_type': resp['token_type'],
             'refresh_token': resp['refresh_token'],
             'expires_at': time.time() + resp['expires_in']})
        self.debug('Refreshed until %s', self._cache[uuid]['expires_at'])
        return True

    def _get_server(self, app):
        """This function returns a SimpleServer with an available WEB_PORT."""
        for port in WEB_PORTS:
            try:
                server = simple_server.make_server('0.0.0.0', port, app)
                return server
            except socket.error:
                # This port did not work. Switch to next one
                continue

    def _get_new_token(self, scopes):
        """This function kicks off some magic.

        We will start a new webserver on one of the WEB_PORTS, and then
        either show the user a URL, or if possible, kick off their browser.
        This URL will be the Authorization endpoint of the IdP with a request
        for our client_id to get a new token with the specified scopes.
        The webserver will then need to catch the return with either an
        Authorization Code (that we will exchange for an access token) or the
        cancellation message.

        This function will store the new token in the local cache, add it to
        the valid cache, and then return the UUID.
        If the user cancelled (or we got another error), we will return None.
        """
        def _token_app(environ, start_response):
            query = environ['QUERY_STRING']
            split = query.split('&')
            kv = dict([v.split('=', 1) for v in split])

            if 'error' in kv:
                self.debug('Error code returned: %s (%s)',
                           kv['error'], kv.get('error_description'))
                self._retrieved_code = False
            else:
                self._retrieved_code = kv['code']

            # Just return a message
            start_response('200 OK', [('Content-Type', 'text/plain')])
            return [u'You can close this window and return to the '
                    u'CLI'.encode('ascii')]

        self._retrieved_code = None
        server = self._get_server(_token_app)
        if not server:
            raise Exception('We were unable to instantiate a webserver')
        return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1]
        rquery = {}
        rquery['scope'] = ' '.join(scopes)
        rquery['response_type'] = 'code'
        rquery['client_id'] = self.client_id
        rquery['redirect_uri'] = return_uri
        rquery['response_mode'] = 'query'
        query = urlencode(rquery)
        authz_url = '%s?%s' % (self._idp_url('Authorization'), query)
        print('Please visit %s to grant authorization' % authz_url,
              file=self._printfd)
        webbrowser.open(authz_url)
        server.handle_request()
        server.server_close()

        assert self._retrieved_code is not None
        if self._retrieved_code is False:
            # The user cancelled the request
            self._retrieved_code = None
            self.debug('User cancelled')
            return None

        self.debug('We got an authorization code!')
        data = {'client_id': self.client_id,
                'grant_type': 'authorization_code',
                'redirect_uri': return_uri,
                'code': self._retrieved_code}
        if self.client_secret:
            data['client_secret'] = self.client_secret

        resp = requests.request(
            'POST',
            self._idp_url('Token'),
            data=data)
        resp.raise_for_status()
        self._retrieved_code = None
        resp = resp.json()
        if 'error' in resp:
            self.debug('Error exchanging authorization code: %s',
                       resp['error'])
            return None
        token = {'access_token': resp['access_token'],
                 'refresh_token': resp['refresh_token'],
                 'expires_at': time.time() + int(resp['expires_in']),
                 'idp': self.idp,
                 'token_type': resp['token_type'],
                 'scopes': scopes}
        # AND WE ARE DONE! \o/
        return self._add_token(token)
:param uuid: The UUID of the", "return False self._update_token( uuid, {'access_token': resp['access_token'], 'token_type': resp['token_type'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() +", "= os.path.expanduser(cachedir or '~/.openidc') self.last_returned_uuid = None self.problem_reported = False self.token_to_try = None", "\"\"\"Removes a token from the cache and writes cache to disk. cache_lock may", "and contents or None if no applicable tokens were found \"\"\" possible_token =", "self.debug('Removing token %s from cache', uuid) with self._cache_lock: self.__refresh_cache() if uuid in self._cache:", "idp: The URL of the idp that issued the token # sub: The", "isinstance(scopes, list): raise ValueError('Scopes must be a list') token = self._get_token_with_scopes(scopes) if token:", "invalid, you should call it. You should explicitly NOT call this function if", "so we also lock across # multiple invocations self._cache_lock = Lock() with self._cache_lock:", "# Red Hat Author: <NAME> <<EMAIL>> # # Permission is hereby granted, free", "resp = resp.json() if 'error' in resp: self.debug('Error exchanging authorization code: %s', resp['error'])", "scopeset if we do not already have on. :kwarg auto_refresh: If False, will", "If that worked, we will return the new access token. If it did", "import time try: from StringIO import StringIO except ImportError: from io import StringIO", "of the idp that issued the token # sub: The subject that owns", "will block until a token is retrieved if requested. It is always safe", "error or because the account or token was lacking specific permissions. \"\"\" if", "from the cache on disk. Requires cache_lock to be held by caller.\"\"\" assert", "of the cached token to attempt to refresh. :rtype: bool :returns: True if", "'r') as f: self._cache = json.loads(f.read()) self.debug('Loaded %i tokens', len(self._cache)) def _refresh_cache(self): \"\"\"Refreshes", "we will exchange for an access token) or the cancellation message. This function", "expanduer. Default is ~/.openidc. If this does not exist and we are unable", "were reported an issue before. Let's just remove this token. self._delete_token(self.last_returned_uuid) return None", "uuid in self._cache: self.debug('Checking %s', uuid) token = self._cache[uuid] if token['idp'] != self.idp:", "cancelled the request self._retrieved_code = None self.debug('User cancelled') return None self.debug('We got an", "add it to the valid cache, and then return the UUID. If the", "the IdP mapping dict. :rtype: string :returns: The IdP URL \"\"\" if method", "string or None :returns: String bearer token if possible or None \"\"\" if", "user or expired. In that case, you will want to call report_token_issue() to", "_get_token_with_scopes(self, scopes): \"\"\"Searches the cache for any tokens that have the requested scopes.", "and `auto_refresh` keyword arguments. `scopes` is required. :kwarg scopes: Scopes required for this", "copyright notice and this permission notice shall be included in # all copies", "resp['error']) return None token = {'access_token': resp['access_token'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + int(resp['expires_in']),", "token caches. Will be put through expanduer. Default is ~/.openidc. 
If this does", "copy(kwargs) scopes = ckwargs.pop('scopes') new_token = ckwargs.pop('new_token', True) auto_refresh = ckwargs.pop('auto_refresh', True) method", "deal # in the Software without restriction, including without limitation the rights #", "from your application that the token you sent was invalid, you should call", "it may have been revoked by # the user! Also, even if it", "= {} rquery['scope'] = ' '.join(scopes) rquery['response_type'] = 'code' rquery['client_id'] = self.client_id rquery['redirect_uri']", "automatically report token issues on 401. This helps with broken apps that may", "urlencode from uuid import uuid4 as uuidgen import webbrowser from wsgiref import simple_server", "the self._cache from the cache on disk. cache_lock may not be held by", "does not exist and we are unable to create it, the OSError will", "that's supposed to still be valid, prefer it # over any others we", "open(self._cachefile, 'w') as f: f.write(json.dumps(self._cache)) def _add_token(self, token): \"\"\"Adds a token to the", "dict with the following keys: # idp: The URL of the idp that", "whenever this file is touched, the cache lock is held \"\"\" assert self._cache_lock.locked()", "function if the token was valid but your request failed due to a", "Mapping with URLs to use for specific endpoints on the IdP. :kwarg use_post:", "HTTP method to use, defaults to POST.. \"\"\" ckwargs = copy(kwargs) scopes =", "query = urlencode(rquery) authz_url = '%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s to", "was invalid, you should call it. You should explicitly NOT call this function", "be valid self.debug('Possible') possible_token = (uuid, token) if possible_token: self.debug('Returning possible token') return", "call this though, since if we already have a token with the current", ":rtype: (string, dict) or None :returns: Token UUID and contents or None if", "<<EMAIL>> # # Permission is hereby granted, free of charge, to any person", "scopes, we will return it. This function will return a bearer token or", "returned token :rtype: (string, dict) or None :returns: Token UUID and contents or", "\"token\" object is a json dict with the following keys: # idp: The", "name for the current client. This assures that whenever this file is touched,", "self.client_secret = client_secret self.useragent = useragent or 'python-openid-client/%s' % \\ release.VERSION self.cachedir =", "= [] self._printfd = printfd def get_token(self, scopes, new_token=True): \"\"\"Function to retrieve tokens", "the token you sent was invalid, you should call it. You should explicitly", "the Authorization endpoint of the IdP with a request for our client_id to", "Switch to next one continue def _get_new_token(self, scopes): \"\"\"This function kicks off some", "# token_type: Token type. Currently supported: \"Bearer\" # expires_at: Token expiration UTC time.", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "not try to automatically report token issues on 401. This helps with broken", "<NAME> <<EMAIL>> # # Permission is hereby granted, free of charge, to any", "to whom the Software is # furnished to do so, subject to the", "UUID and contents or None if no applicable tokens were found \"\"\" possible_token", "have been revoked by # the user! 
Also, even if it has expired,", "keys: # idp: The URL of the idp that issued the token #", "String bearer token if possible or None \"\"\" if not isinstance(scopes, list): raise", "call') if method not in ['POST']: raise ValueError('Cannot use POST tokens in %s", "f: f.write(json.dumps(self._cache)) def _add_token(self, token): \"\"\"Adds a token to the cache and writes", "to be in the returned token :rtype: (string, dict) or None :returns: Token", "not be held by anyone.\"\"\" with self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes self._cache to", "through expanduer. Default is ~/.openidc. If this does not exist and we are", "in cache, fields %s', uuid, toupdate.keys()) with self._cache_lock: self.__refresh_cache() if uuid not in", "the token to be updated :param toupdate: Dict indicating which fields need to", "the current app_identifier that has the required scopes, we will return it. This", "that has the required scopes, we will return it. This function will return", "%s', scopes) for uuid in self._cache: self.debug('Checking %s', uuid) token = self._cache[uuid] if", "# Copyright (C) 2016, 2017 Red Hat, Inc. # Red Hat Author: <NAME>", "expired, we might still be able to # refresh the token. # refresh_token:", "exchanging authorization code: %s', resp['error']) return None token = {'access_token': resp['access_token'], 'refresh_token': resp['refresh_token'],", ":kwargs http_method: The HTTP method to use, defaults to POST.. \"\"\" ckwargs =", "might still be able to # refresh the token. # refresh_token: The token", "catch the return with either an Authorization Code (that we will exchange for", "self._cache[uuid].update(toupdate) self.__write_cache() return uuid def _delete_token(self, uuid): \"\"\"Removes a token from the cache", "self._retrieved_code} if self.client_secret: data['client_secret'] = self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status()", "a 401 return code in incorrect cases. :kwargs http_method: The HTTP method to", "a token that's supposed to still be valid, prefer it # over any", "OR OTHER DEALINGS IN THE # SOFTWARE. \"\"\"Client for applications relying on OpenID", "# The ports that we will try to use for our webserver WEB_PORTS", "ValueError('Idp Mapping did not include path for %s' % method) def _refresh_token(self, uuid):", "True def _get_server(self, app): \"\"\"This function returns a SimpleServer with an available WEB_PORT.\"\"\"", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "f.write(json.dumps({})) with open(self._cachefile, 'r') as f: self._cache = json.loads(f.read()) self.debug('Loaded %i tokens', len(self._cache))", "token in the local cache, add it to the valid cache, and then", "this though, since if we already have a token with the current app_identifier", "be held by anyone. :param token: UUID of the token to be updated", "with open(self._cachefile, 'w') as f: f.write(json.dumps({})) with open(self._cachefile, 'r') as f: self._cache =", "{'client_id': self.client_id, 'grant_type': 'authorization_code', 'redirect_uri': return_uri, 'code': self._retrieved_code} if self.client_secret: data['client_secret'] = self.client_secret", "still be valid, it may have been revoked by # the user! Also,", "the OSError will be thrown. 
:kwargs printfd: The File object to print token", "if no applicable tokens were found \"\"\" possible_token = None self.debug('Trying to get", "Authorization header :kwarg client_id: The Client Identifier used to request credentials :kwarg client_secret:", "'headers' not in ckwargs: ckwargs['headers'] = {} ckwargs['headers']['Authorization'] = 'Bearer %s' % token", "instantiate a webserver') return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1] rquery = {} rquery['scope'] =", "that goes with the client_id. May be None if your IdP does not", "self.logger.debug self.app_id = app_identifier self.use_post = use_post self.idp = id_provider self.idp_mapping = id_provider_mapping", "not self.last_returned_uuid: raise Exception('Cannot report issue before requesting token') if self.problem_reported: # We", "json dict with the following keys: # idp: The URL of the idp", "data = {'client_id': self.client_id, 'grant_type': 'authorization_code', 'redirect_uri': return_uri, 'code': self._retrieved_code} if self.client_secret: data['client_secret']", "ckwargs.pop('scopes') new_token = ckwargs.pop('new_token', True) auto_refresh = ckwargs.pop('auto_refresh', True) method = ckwargs.pop('http_method', 'POST')", "NOTE: Even if the expires_at # indicates the token should still be valid,", "function will store the new token in the local cache, add it to", "= {} ckwargs['data']['access_token'] = token else: if 'headers' not in ckwargs: ckwargs['headers'] =", "UUID of the token to be updated :param toupdate: Dict indicating which fields", ":kwarg auto_refresh: If False, will not try to automatically report token issues on", "This dict contains uuid: token pairs # Every \"token\" object is a json", "refresh_result = self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None else: self.problem_reported = True", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "if uuid in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache() else: self.debug('Token was already", "cache on disk. Requires cache_lock to be held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Refreshing", "= uuidgen().hex self.debug('Adding token %s to cache', uuid) with self._cache_lock: self.__refresh_cache() self._cache[uuid] =", "or saving the cache if renewal was succesful. :param uuid: The UUID of", "sell # copies of the Software, and to permit persons to whom the", "= 'Bearer %s' % token resp = requests.request(method, *args, **ckwargs) if resp.status_code ==", "# We got a 401 and this is a retry. Report error self.report_token_issue()", "cache if renewal was succesful. :param uuid: The UUID of the cached token", "possible_token def _idp_url(self, method): \"\"\"Returns the IdP URL for the requested method. :param", "# all copies or substantial portions of the Software. # # THE SOFTWARE", "or expired. 
In that case, you will want to call report_token_issue() to try", "ckwargs['headers'] = {} ckwargs['headers']['Authorization'] = 'Bearer %s' % token resp = requests.request(method, *args,", "self.debug('Missing scope: %s not subset of %s', set(scopes), set(token['scopes'])) continue if token['expires_at'] <", "if uuid not in self._cache: return None self._cache[uuid].update(toupdate) self.__write_cache() return uuid def _delete_token(self,", "# sub: The subject that owns the token # access_token: Token value #", "rather than Authorization header :kwarg client_id: The Client Identifier used to request credentials", "THE USE OR OTHER DEALINGS IN THE # SOFTWARE. \"\"\"Client for applications relying", "all copies or substantial portions of the Software. # # THE SOFTWARE IS", "return it. This function will return a bearer token or None. Note that", "already have on. :kwarg auto_refresh: If False, will not try to automatically report", "self._cache_lock: self.__refresh_cache() if uuid in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache() else: self.debug('Token", "URL will be the Authorization endpoint of the IdP with a request for", "bearer token if possible or None \"\"\" if not isinstance(scopes, list): raise ValueError('Scopes", "cache. cache_lock may not be held by anyone. :param token: UUID of the", "(C) 2016, 2017 Red Hat, Inc. # Red Hat Author: <NAME> <<EMAIL>> #", "{'access_token': resp['access_token'], 'token_type': resp['token_type'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + resp['expires_in']}) self.debug('Refreshed until %s',", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "anyone. :param token: Dict of the token to be added to the cache", "to disk. cache_lock may not be held by anyone. :param uuid: UUID of", "prefer to return tokens whose expires_at is still before the current time, but", "# If we had a valid token, use that self.last_returned_uuid = token[0] self.problem_reported", "return uuid def _delete_token(self, uuid): \"\"\"Removes a token from the cache and writes", "Token expiration UTC time. NOTE: Even if the expires_at # indicates the token", "not self.token_to_try: return resp return self.send_request(*args, **kwargs) elif resp.status_code == 401: # We", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "printfd def get_token(self, scopes, new_token=True): \"\"\"Function to retrieve tokens with specific scopes. This", "use a secret. :kwarg useragent: Useragent string to use. If not provided, defaults", "files (the \"Software\"), to deal # in the Software without restriction, including without", "Hat, Inc. # Red Hat Author: <NAME> <<EMAIL>> # # Permission is hereby", "def _get_token_with_scopes(self, scopes): \"\"\"Searches the cache for any tokens that have the requested", "an access token) or the cancellation message. 
This function will store the new", "server.server_close() assert self._retrieved_code is not None if self._retrieved_code is False: # The user", "id_provider self.idp_mapping = id_provider_mapping self.client_id = client_id self.client_secret = client_secret self.useragent = useragent", "= client_secret self.useragent = useragent or 'python-openid-client/%s' % \\ release.VERSION self.cachedir = os.path.expanduser(cachedir", "or None if no applicable tokens were found \"\"\" possible_token = None self.debug('Trying", "post call') if method not in ['POST']: raise ValueError('Cannot use POST tokens in", "to the CLI'.encode('ascii')] self._retrieved_code = None server = self._get_server(_token_app) if not server: raise", "refresh, error: %s', resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'], 'token_type': resp['token_type'], 'refresh_token':", "self._retrieved_code = None self.debug('User cancelled') return None self.debug('We got an authorization code!') data", "following conditions: # # The above copyright notice and this permission notice shall", "an error with the last token that was returned. This will attempt to", "Scopes required for this call. If a token is not present with this", "idp that issued the token # sub: The subject that owns the token", "assert self._cache_lock.locked() self.debug('Writing cache with %i tokens', len(self._cache)) with open(self._cachefile, 'w') as f:", "URL of the identity provider to get tokens from :param id_provider_mapping: Mapping with", "= simple_server.make_server('0.0.0.0', port, app) return server except socket.error: # This port did not", "# SOFTWARE. \"\"\"Client for applications relying on OpenID Connect for authentication.\"\"\" from __future__", "always safe to call this though, since if we already have a token", "self._cache[uuid] self.debug('Refreshing token %s', uuid) data = {'client_id': self.client_id, 'grant_type': 'refresh_token', 'refresh_token': oldtoken['refresh_token']}", "or 'python-openid-client/%s' % \\ release.VERSION self.cachedir = os.path.expanduser(cachedir or '~/.openidc') self.last_returned_uuid = None", "kv: self.debug('Error code returned: %s (%s)', kv['error'], kv.get('error_description')) self._retrieved_code = False else: self._retrieved_code", "The above copyright notice and this permission notice shall be included in #", "owns the token # access_token: Token value # token_type: Token type. Currently supported:", "self.__refresh_cache() self._valid_cache = [] self._printfd = printfd def get_token(self, scopes, new_token=True): \"\"\"Function to", "request. Allarguments and keyword arguments are like the arguments to requests, except for", "the WEB_PORTS, and then either show the user a URL, or if possible,", "in the returned token :rtype: (string, dict) or None :returns: Token UUID and", "or delete the token. :kwarg scopes: A list of scopes required for the", "self._get_new_token(scopes) if uuid: self.last_returned_uuid = uuid self.problem_reported = False return self._cache[uuid]['access_token'] def report_token_issue(self):", "required. :kwarg scopes: Scopes required for this call. If a token is not", "request for our client_id to get a new token with the specified scopes.", "error with the last token that was returned. This will attempt to renew", "requests, except for `scopes`, `new_token` and `auto_refresh` keyword arguments. `scopes` is required. :kwarg", "disk. cache_lock may not be held by anyone. :param token: Dict of the", "is a retry. 
Report error self.report_token_issue() return resp else: return resp @property def", "def report_token_issue(self): \"\"\"Report an error with the last token that was returned. This", "[] self._printfd = printfd def get_token(self, scopes, new_token=True): \"\"\"Function to retrieve tokens with", "it # over any others we have self.debug('Not yet expired, returning') return uuid,", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "webserver WEB_PORTS = [12345, 23456] class OpenIDCClient(object): # Internal implementation of tokens: #", "def _get_server(self, app): \"\"\"This function returns a SimpleServer with an available WEB_PORT.\"\"\" for", "Authorization Code (that we will exchange for an access token) or the cancellation", "'http://localhost:%i/' % server.socket.getsockname()[1] rquery = {} rquery['scope'] = ' '.join(scopes) rquery['response_type'] = 'code'", "request self._retrieved_code = None self.debug('User cancelled') return None self.debug('We got an authorization code!')", "threading import Lock import time try: from StringIO import StringIO except ImportError: from", "has the required scopes, we will return it. This function will return a", "cachedir=None, printfd=sys.stdout): \"\"\"Client for interacting with web services relying on OpenID Connect. :param", "= self._get_token_with_scopes(scopes) if token: # If we had a valid token, use that", "with scopes %s', scopes) for uuid in self._cache: self.debug('Checking %s', uuid) token =", "be held by anyone. :param uuid: UUID of the token to be removed", "None if self._retrieved_code is False: # The user cancelled the request self._retrieved_code =", "toupdate: Dict indicating which fields need to be updated \"\"\" self.debug('Updating token %s", "and this is a retry. Report error self.report_token_issue() return resp else: return resp", "a new token with the current scopeset if we do not already have", "the token. # refresh_token: The token we can use to refresh the access", "part... uuid = self._get_new_token(scopes) if uuid: self.last_returned_uuid = uuid self.problem_reported = False return", "= self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() resp = resp.json() if", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "specific scopes. This function will block until a token is retrieved if requested.", "new access token. If it did not work, we will return None and", "If not provided, defaults to \"python-openidc-client/VERSION\" :kwarg cachedir: The directory in which to", "`auto_refresh` keyword arguments. `scopes` is required. :kwarg scopes: Scopes required for this call.", "responsible for either removing the token if it could not be refreshed or", "off their browser. This URL will be the Authorization endpoint of the IdP", "been revoked by the user or expired. In that case, you will want", "def get_token(self, scopes, new_token=True): \"\"\"Function to retrieve tokens with specific scopes. 
This function", "notice shall be included in # all copies or substantial portions of the", "None # TODO: Make cache_lock a filesystem lock so we also lock across", "we had requested with the token def __init__(self, app_identifier, id_provider, id_provider_mapping, client_id, client_secret=None,", "self.client_secret: data['client_secret'] = self.client_secret resp = requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() resp =", "across # multiple invocations self._cache_lock = Lock() with self._cache_lock: self.__refresh_cache() self._valid_cache = []", "This is a token that may or may not still be valid self.debug('Possible')", "user a URL, or if possible, kick off their browser. This URL will", "Token value # token_type: Token type. Currently supported: \"Bearer\" # expires_at: Token expiration", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'], 'token_type': resp['token_type'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time()", "were found \"\"\" possible_token = None self.debug('Trying to get token with scopes %s',", "urlencode except ImportError: from urllib.parse import urlencode from uuid import uuid4 as uuidgen", "returned: %s (%s)', kv['error'], kv.get('error_description')) self._retrieved_code = False else: self._retrieved_code = kv['code'] #", "for applications relying on OpenID Connect for authentication.\"\"\" from __future__ import print_function from", "tokens were found \"\"\" possible_token = None self.debug('Trying to get token with scopes", "since if we already have a token with the current app_identifier that has", "with self._cache_lock: self.__refresh_cache() if uuid in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache() else:", "the cached token to attempt to refresh. :rtype: bool :returns: True if the", "self.app_id = app_identifier self.use_post = use_post self.idp = id_provider self.idp_mapping = id_provider_mapping self.client_id", "if not set(scopes).issubset(set(token['scopes'])): self.debug('Missing scope: %s not subset of %s', set(scopes), set(token['scopes'])) continue", "json import logging from threading import Lock import time try: from StringIO import", "return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id) def __refresh_cache(self): \"\"\"Refreshes the self._cache from the cache", "= ckwargs.pop('scopes') new_token = ckwargs.pop('new_token', True) auto_refresh = ckwargs.pop('auto_refresh', True) method = ckwargs.pop('http_method',", "try: from StringIO import StringIO except ImportError: from io import StringIO import socket", "requests.request( 'POST', self._idp_url('Token'), data=data) resp.raise_for_status() resp = resp.json() if 'error' in resp: self.debug('Unable", "report token issues on 401. This helps with broken apps that may send", "False return self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report an error with the last token that", "= True return self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs): \"\"\"Make an python-requests POST request.", "urllib.parse import urlencode from uuid import uuid4 as uuidgen import webbrowser from wsgiref", "if self.token_to_try: is_retry = True token = self.token_to_try self.token_to_try = None else: token", "% self.app_id) def __refresh_cache(self): \"\"\"Refreshes the self._cache from the cache on disk. 
Requires", "either an Authorization Code (that we will exchange for an access token) or", "in which to store the token caches. Will be put through expanduer. Default", "dict # This dict contains uuid: token pairs # Every \"token\" object is", "expired. In that case, you will want to call report_token_issue() to try to", "to get the cache file name for the current client. This assures that", "token # access_token: Token value # token_type: Token type. Currently supported: \"Bearer\" #", "cache lock is held \"\"\" assert self._cache_lock.locked() return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id) def", "return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1] rquery = {} rquery['scope'] = ' '.join(scopes) rquery['response_type']", "None :returns: String bearer token if possible or None \"\"\" if not isinstance(scopes,", "else: self.debug('Token was already gone') def _get_token_with_scopes(self, scopes): \"\"\"Searches the cache for any", "the requested scopes. It will prefer to return tokens whose expires_at is still", "on 401. This helps with broken apps that may send a 401 return", "False, will not try to automatically report token issues on 401. This helps", "server = self._get_server(_token_app) if not server: raise Exception('We were unable to instantiate a", "token. self._delete_token(self.last_returned_uuid) return None refresh_result = self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None", "we also lock across # multiple invocations self._cache_lock = Lock() with self._cache_lock: self.__refresh_cache()", "a request for our client_id to get a new token with the specified", "self.problem_reported = False self.token_to_try = None self._retrieved_code = None # TODO: Make cache_lock", "it might be refreshable. :param scopes: List of scopes that need to be", "cache file name for the current client. This assures that whenever this file", "False if self.token_to_try: is_retry = True token = self.token_to_try self.token_to_try = None else:", "'w') as f: f.write(json.dumps(self._cache)) def _add_token(self, token): \"\"\"Adds a token to the cache", "The HTTP method to use, defaults to POST.. \"\"\" ckwargs = copy(kwargs) scopes", "uuid) with self._cache_lock: self.__refresh_cache() self._cache[uuid] = token self.__write_cache() return uuid def _update_token(self, uuid,", "OpenID Connect for authentication.\"\"\" from __future__ import print_function from copy import copy import", "able to # refresh the token. # refresh_token: The token we can use", "new token with the specified scopes. The webserver will then need to catch", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "for the current client. :kwarg new_token: If True, we will actively request the", "self.get_token(scopes, new_token=new_token) if not token: return None if self.use_post: if 'json' in ckwargs:", "with self._cache_lock: self.__refresh_cache() def __write_cache(self): \"\"\"Wirtes self._cache to cache on disk. Requires cache_lock", "Lock import time try: from StringIO import StringIO except ImportError: from io import", "in ckwargs: ckwargs['data'] = {} ckwargs['data']['access_token'] = token else: if 'headers' not in", "disk. Requires cache_lock to be held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Refreshing cache') if", "may not be held by anyone. 
:param uuid: UUID of the token to", "uuid: self.last_returned_uuid = uuid self.problem_reported = False return self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report an", "not be held by anyone. :param uuid: UUID of the token to be", "file name for the current client. This assures that whenever this file is", "self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs): \"\"\"Make an python-requests POST request. Allarguments and keyword", "'python-openid-client/%s' % \\ release.VERSION self.cachedir = os.path.expanduser(cachedir or '~/.openidc') self.last_returned_uuid = None self.problem_reported", "returns a SimpleServer with an available WEB_PORT.\"\"\" for port in WEB_PORTS: try: server", "False return token[1]['access_token'] elif not new_token: return None # We did not have", "token. # refresh_token: The token we can use to refresh the access token", "method to use, defaults to POST.. \"\"\" ckwargs = copy(kwargs) scopes = ckwargs.pop('scopes')", "None \"\"\" if not isinstance(scopes, list): raise ValueError('Scopes must be a list') token", "to next one continue def _get_new_token(self, scopes): \"\"\"This function kicks off some magic.", "None refresh_result = self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None else: self.problem_reported =", "OK', [('Content-Type', 'text/plain')]) return [u'You can close this window and return to the", "put through expanduer. Default is ~/.openidc. If this does not exist and we", "be a list') token = self._get_token_with_scopes(scopes) if token: # If we had a", "token: it might be refreshable. :param scopes: List of scopes that need to", "hard part... uuid = self._get_new_token(scopes) if uuid: self.last_returned_uuid = uuid self.problem_reported = False", "to the following conditions: # # The above copyright notice and this permission", "import logging from threading import Lock import time try: from StringIO import StringIO", "message start_response('200 OK', [('Content-Type', 'text/plain')]) return [u'You can close this window and return", "secrets rather than Authorization header :kwarg client_id: The Client Identifier used to request", "self.__write_cache() return uuid def _update_token(self, uuid, toupdate): \"\"\"Updates a token in the cache.", "scopes): \"\"\"Searches the cache for any tokens that have the requested scopes. It", "is a json dict with the following keys: # idp: The URL of", "the last token that was returned. This will attempt to renew the token", "Every app id has its own token cache # The token cache is", "such tokens exist it will return the possibly expired token: it might be", "apps that may send a 401 return code in incorrect cases. :kwargs http_method:", "magic. 
We will start a new webserver on one of the WEB_PORTS, and", "self.idp: self.debug('Incorrect idp') continue if not set(scopes).issubset(set(token['scopes'])): self.debug('Missing scope: %s not subset of", "it has expired, we might still be able to # refresh the token.", "token # scopes: A list of scopes that we had requested with the", "of the identity provider to get tokens from :param id_provider_mapping: Mapping with URLs", "True) auto_refresh = ckwargs.pop('auto_refresh', True) method = ckwargs.pop('http_method', 'POST') is_retry = False if", "rquery['redirect_uri'] = return_uri rquery['response_mode'] = 'query' query = urlencode(rquery) authz_url = '%s?%s' %", "in resp: self.debug('Error exchanging authorization code: %s', resp['error']) return None token = {'access_token':", "openidc_client import release # The ports that we will try to use for", "will be requested unless nonblocking is True. :kwarg new_token: If True, we will", "python-requests POST request. Allarguments and keyword arguments are like the arguments to requests,", "is touched, the cache lock is held \"\"\" assert self._cache_lock.locked() return os.path.join(self.cachedir, 'oidc_%s.json'", "self.idp + self.idp_mapping[method] else: return ValueError('Idp Mapping did not include path for %s'", "Software is # furnished to do so, subject to the following conditions: #", "# -*- coding: utf-8 -*- # # Copyright (C) 2016, 2017 Red Hat,", "os try: from urllib import urlencode except ImportError: from urllib.parse import urlencode from", "use_post=False, useragent=None, cachedir=None, printfd=sys.stdout): \"\"\"Client for interacting with web services relying on OpenID", "if no such tokens exist it will return the possibly expired token: it", "io import StringIO import socket import os try: from urllib import urlencode except", "(self._idp_url('Authorization'), query) print('Please visit %s to grant authorization' % authz_url, file=self._printfd) webbrowser.open(authz_url) server.handle_request()", "may not be held by anyone. :param token: UUID of the token to", "the cache if renewal was succesful. :param uuid: The UUID of the cached", "may or may not still be valid self.debug('Possible') possible_token = (uuid, token) if", "Will be put through expanduer. Default is ~/.openidc. If this does not exist", "'query' query = urlencode(rquery) authz_url = '%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s", "list of scopes required for the current client. :kwarg new_token: If True, we", "(or we got another error), we will return None. \"\"\" def _token_app(environ, start_response):", "_add_token(self, token): \"\"\"Adds a token to the cache and writes cache to disk.", "that we had requested with the token def __init__(self, app_identifier, id_provider, id_provider_mapping, client_id,", "wsgiref import simple_server import requests import sys from openidc_client import release # The", ":kwargs printfd: The File object to print token instructions to. \"\"\" self.logger =", "= ' '.join(scopes) rquery['response_type'] = 'code' rquery['client_id'] = self.client_id rquery['redirect_uri'] = return_uri rquery['response_mode']", "Exception('Cannot report issue before requesting token') if self.problem_reported: # We were reported an", "use, defaults to POST.. 
\"\"\" ckwargs = copy(kwargs) scopes = ckwargs.pop('scopes') new_token =", "self.debug('Loaded %i tokens', len(self._cache)) def _refresh_cache(self): \"\"\"Refreshes the self._cache from the cache on", "# This is a token that may or may not still be valid", "is still before the current time, but if no such tokens exist it", "token that was returned. This will attempt to renew the token that was", "with a request for our client_id to get a new token with the", "{'client_id': self.client_id, 'grant_type': 'refresh_token', 'refresh_token': oldtoken['refresh_token']} if self.client_secret: data['client_secret'] = self.client_secret resp =", "= None self.problem_reported = False self.token_to_try = None self._retrieved_code = None # TODO:", "f: f.write(json.dumps({})) with open(self._cachefile, 'r') as f: self._cache = json.loads(f.read()) self.debug('Loaded %i tokens',", "store the new token in the local cache, add it to the valid", "try to use for our webserver WEB_PORTS = [12345, 23456] class OpenIDCClient(object): #", "issue before requesting token') if self.problem_reported: # We were reported an issue before.", "cache') if not os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating file') with", "and then return the UUID. If the user cancelled (or we got another", "if not os.path.exists(self._cachefile): self.debug('Creating file') with open(self._cachefile, 'w') as f: f.write(json.dumps({})) with open(self._cachefile,", "the token was valid but your request failed due to a server error", "one will be requested unless nonblocking is True. :kwarg new_token: If True, we", "resp else: return resp @property def _cachefile(self): \"\"\"Property to get the cache file", "use_post self.idp = id_provider self.idp_mapping = id_provider_mapping self.client_id = client_id self.client_secret = client_secret", "self.client_id = client_id self.client_secret = client_secret self.useragent = useragent or 'python-openid-client/%s' % \\", "was already gone') def _get_token_with_scopes(self, scopes): \"\"\"Searches the cache for any tokens that", "caller.\"\"\" assert self._cache_lock.locked() self.debug('Writing cache with %i tokens', len(self._cache)) with open(self._cachefile, 'w') as", "Currently supported: \"Bearer\" # expires_at: Token expiration UTC time. NOTE: Even if the", "to refresh, error: %s', resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'], 'token_type': resp['token_type'],", "in split]) if 'error' in kv: self.debug('Error code returned: %s (%s)', kv['error'], kv.get('error_description'))", "copy import json import logging from threading import Lock import time try: from", "This function will block until a token is retrieved if requested. It is", "can use to refresh the access token # scopes: A list of scopes", "self._get_token_with_scopes(scopes) if token: # If we had a valid token, use that self.last_returned_uuid", "to try to renew the token or delete the token. 
:kwarg scopes: A", "possible_token: self.debug('Returning possible token') return possible_token def _idp_url(self, method): \"\"\"Returns the IdP URL", "if not token: return None if self.use_post: if 'json' in ckwargs: raise ValueError('Cannot", "self.use_post = use_post self.idp = id_provider self.idp_mapping = id_provider_mapping self.client_id = client_id self.client_secret", "with either an Authorization Code (that we will exchange for an access token)", "# over any others we have self.debug('Not yet expired, returning') return uuid, token", "added to the cache \"\"\" uuid = uuidgen().hex self.debug('Adding token %s to cache',", "with the client_id. May be None if your IdP does not require you", "use. If not provided, defaults to \"python-openidc-client/VERSION\" :kwarg cachedir: The directory in which", "in ckwargs: raise ValueError('Cannot provide json in a post call') if method not", "have the requested scopes. It will prefer to return tokens whose expires_at is", ":returns: The IdP URL \"\"\" if method in self.idp_mapping: return self.idp + self.idp_mapping[method]", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "need to catch the return with either an Authorization Code (that we will", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "a list') token = self._get_token_with_scopes(scopes) if token: # If we had a valid", "self.debug('Not yet expired, returning') return uuid, token # This is a token that", "THE # SOFTWARE. \"\"\"Client for applications relying on OpenID Connect for authentication.\"\"\" from", "kicks off some magic. We will start a new webserver on one of", "case, you will want to call report_token_issue() to try to renew the token", "ckwargs: raise ValueError('Cannot provide json in a post call') if method not in", "None self._retrieved_code = None # TODO: Make cache_lock a filesystem lock so we", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the", "function kicks off some magic. We will start a new webserver on one", "= json.loads(f.read()) self.debug('Loaded %i tokens', len(self._cache)) def _refresh_cache(self): \"\"\"Refreshes the self._cache from the", "= None resp = resp.json() if 'error' in resp: self.debug('Error exchanging authorization code:", "the following conditions: # # The above copyright notice and this permission notice", "authorization code!') data = {'client_id': self.client_id, 'grant_type': 'authorization_code', 'redirect_uri': return_uri, 'code': self._retrieved_code} if", "identity provider to get tokens from :param id_provider_mapping: Mapping with URLs to use", "token[1]['access_token'] elif not new_token: return None # We did not have a valid", "= query.split('&') kv = dict([v.split('=', 1) for v in split]) if 'error' in", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "may have been revoked by # the user! Also, even if it has", "@property def _cachefile(self): \"\"\"Property to get the cache file name for the current", "f.write(json.dumps(self._cache)) def _add_token(self, token): \"\"\"Adds a token to the cache and writes cache", "rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "if possible, kick off their browser. This URL will be the Authorization endpoint", "# We were reported an issue before. Let's just remove this token. self._delete_token(self.last_returned_uuid)", "attempt to refresh. 
:rtype: bool :returns: True if the token was succesfully refreshed,", "scopes, new_token=True): \"\"\"Function to retrieve tokens with specific scopes. This function will block", "self.debug('Unable to refresh, error: %s', resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'], 'token_type':", "header :kwarg client_id: The Client Identifier used to request credentials :kwarg client_secret: The", "succesfully refreshed, False otherwise \"\"\" oldtoken = self._cache[uuid] self.debug('Refreshing token %s', uuid) data", "%s not subset of %s', set(scopes), set(token['scopes'])) continue if token['expires_at'] < time.time(): #", "rquery['client_id'] = self.client_id rquery['redirect_uri'] = return_uri rquery['response_mode'] = 'query' query = urlencode(rquery) authz_url", "import socket import os try: from urllib import urlencode except ImportError: from urllib.parse", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "A list of scopes that we had requested with the token def __init__(self,", "self.token_to_try = None else: token = self.get_token(scopes, new_token=new_token) if not token: return None", "client secrets rather than Authorization header :kwarg client_id: The Client Identifier used to", "new token with the current scopeset if we do not already have on.", "= requests.request(method, *args, **ckwargs) if resp.status_code == 401 and not is_retry: if not", "to do so, subject to the following conditions: # # The above copyright", "= 'query' query = urlencode(rquery) authz_url = '%s?%s' % (self._idp_url('Authorization'), query) print('Please visit", "token %s', uuid) data = {'client_id': self.client_id, 'grant_type': 'refresh_token', 'refresh_token': oldtoken['refresh_token']} if self.client_secret:", "a token in the cache. cache_lock may not be held by anyone. :param", "uuid in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache() else: self.debug('Token was already gone')", "to requests, except for `scopes`, `new_token` and `auto_refresh` keyword arguments. `scopes` is required.", "of the token to be added to the cache \"\"\" uuid = uuidgen().hex", "to grant authorization' % authz_url, file=self._printfd) webbrowser.open(authz_url) server.handle_request() server.server_close() assert self._retrieved_code is not", "from io import StringIO import socket import os try: from urllib import urlencode", "by caller.\"\"\" assert self._cache_lock.locked() self.debug('Writing cache with %i tokens', len(self._cache)) with open(self._cachefile, 'w')", "for the current client. 
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016, 2017 Red Hat, Inc.
# Red Hat Author: <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Client for applications relying on OpenID Connect for authentication."""
from __future__ import print_function
from copy import copy
import json
import logging
from threading import Lock
import time
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO
import socket
import os
try:
    from urllib import urlencode
except ImportError:
    from urllib.parse import urlencode
from uuid import uuid4 as uuidgen
import webbrowser
from wsgiref import simple_server

import requests
import sys

from openidc_client import release

# The ports that we will try to use for our webserver
WEB_PORTS = [12345, 23456]


class OpenIDCClient(object):
    # Internal implementation of tokens:
    #  Every app id has its own token cache
    #  The token cache is a json serialized dict
    #  This dict contains uuid: token pairs
    #  Every "token" object is a json dict with the following keys:
    #   idp: The URL of the idp that issued the token
    #   sub: The subject that owns the token
    #   access_token: Token value
    #   token_type: Token type. Currently supported: "Bearer"
    #   expires_at: Token expiration UTC time. NOTE: Even if the expires_at
    #    indicates the token should still be valid, it may have been revoked
    #    by the user! Also, even if it has expired, we might still be able to
    #    refresh the token.
    #   refresh_token: The token we can use to refresh the access token
    #   scopes: A list of scopes that we had requested with the token

    def __init__(self, app_identifier, id_provider, id_provider_mapping,
                 client_id, client_secret=None, use_post=False,
                 useragent=None, cachedir=None, printfd=sys.stdout):
        """Client for interacting with web services relying on OpenID Connect.

        :param app_identifier: Identifier for storage of retrieved tokens
        :param id_provider: URL of the identity provider to get tokens from
        :param id_provider_mapping: Mapping with URLs to use for specific
            endpoints on the IdP.
        :kwarg use_post: Whether to use POST submission of client secrets
        :kwarg client_id: The Client Identifier used to request credentials
        :kwarg client_secret: The client "secret" that goes with the
            client_id. May be None if your IdP does not require you to use a
            secret.
        :kwarg useragent: Useragent string to use. If not provided, defaults
            to "python-openidc-client/VERSION"
        :kwarg cachedir: The directory in which to store the token caches.
            Will be put through expanduser. Default is ~/.openidc. If this
            does not exist and we are unable to create it, the OSError will
            be thrown.
        :kwargs printfd: The File object to print token instructions to.
        """
        self.logger = logging.getLogger(__name__)
        self.debug = self.logger.debug

        self.app_id = app_identifier
        self.use_post = use_post
        self.idp = id_provider
        self.idp_mapping = id_provider_mapping
        self.client_id = client_id
        self.client_secret = client_secret
        self.useragent = useragent or 'python-openid-client/%s' % \
            release.VERSION
        self.cachedir = os.path.expanduser(cachedir or '~/.openidc')
        self.last_returned_uuid = None
        self.problem_reported = False
        self.token_to_try = None
        self._retrieved_code = None
        # TODO: Make cache_lock a filesystem lock so we also lock across
        # multiple invocations
        self._cache_lock = Lock()
        with self._cache_lock:
            self.__refresh_cache()
        self._valid_cache = []
        self._printfd = printfd

    def get_token(self, scopes, new_token=True):
        """Function to retrieve tokens with specific scopes.

        This function will block until a token is retrieved if requested.
        It is always safe to call this though, since if we already have a
        token with the current app_identifier that has the required scopes,
        we will return it.

        This function will return a bearer token or None.
        Note that the bearer token might have been revoked by the user or
        expired.
        In that case, you will want to call report_token_issue() to try to
        renew the token or delete the token.

        :kwarg scopes: A list of scopes required for the current client.
        :kwarg new_token: If True, we will actively request the user to get a
            new token with the current scopeset if we do not already have one.
        :rtype: string or None
        :returns: String bearer token if possible or None
        """
        if not isinstance(scopes, list):
            raise ValueError('Scopes must be a list')
        token = self._get_token_with_scopes(scopes)
        if token:
            # If we had a valid token, use that
            self.last_returned_uuid = token[0]
            self.problem_reported = False
            return token[1]['access_token']
        elif not new_token:
            return None

        # We did not have a valid token, now comes the hard part...
        uuid = self._get_new_token(scopes)
        if uuid:
            self.last_returned_uuid = uuid
            self.problem_reported = False
            return self._cache[uuid]['access_token']

    def report_token_issue(self):
        """Report an error with the last token that was returned.

        This will attempt to renew the token that was last returned.
        If that worked, we will return the new access token.
        If it did not work, we will return None and remove this token from
        the cache.

        If you get an indication from your application that the token you
        sent was invalid, you should call it.
        You should explicitly NOT call this function if the token was valid
        but your request failed due to a server error or because the account
        or token was lacking specific permissions.
        """
        if not self.last_returned_uuid:
            raise Exception('Cannot report issue before requesting token')
        if self.problem_reported:
            # We were reported an issue before. Let's just remove this token.
            self._delete_token(self.last_returned_uuid)
            return None
        refresh_result = self._refresh_token(self.last_returned_uuid)
        if not refresh_result:
            self._delete_token(self.last_returned_uuid)
            return None
        else:
            self.problem_reported = True
            return self._cache[self.last_returned_uuid]['access_token']

    def send_request(self, *args, **kwargs):
        """Make a python-requests POST request.

        All arguments and keyword arguments are like the arguments to
        requests, except for `scopes`, `new_token` and `auto_refresh` keyword
        arguments. `scopes` is required.

        :kwarg scopes: Scopes required for this call. If a token is not
            present with these scopes, a new one will be requested unless
            nonblocking is True.
        :kwarg new_token: If True, we will actively request the user to get a
            new token with the current scopeset if we do not already have one.
        :kwarg auto_refresh: If False, will not try to automatically report
            token issues on 401. This helps with broken apps that may send a
            401 return code in incorrect cases.
        :kwargs http_method: The HTTP method to use, defaults to POST.
        """
        ckwargs = copy(kwargs)

        scopes = ckwargs.pop('scopes')
        new_token = ckwargs.pop('new_token', True)
        auto_refresh = ckwargs.pop('auto_refresh', True)
        method = ckwargs.pop('http_method', 'POST')

        is_retry = False
        if self.token_to_try:
            is_retry = True
            token = self.token_to_try
            self.token_to_try = None
        else:
            token = self.get_token(scopes, new_token=new_token)
            if not token:
                return None

        if self.use_post:
            if 'json' in ckwargs:
                raise ValueError('Cannot provide json in a post call')
            if method not in ['POST']:
                raise ValueError('Cannot use POST tokens in %s method'
                                 % method)

            if 'data' not in ckwargs:
                ckwargs['data'] = {}
            ckwargs['data']['access_token'] = token
        else:
            if 'headers' not in ckwargs:
                ckwargs['headers'] = {}
            ckwargs['headers']['Authorization'] = 'Bearer %s' % token

        resp = requests.request(method, *args, **ckwargs)
        if resp.status_code == 401 and not is_retry:
            if not auto_refresh:
                return resp

            self.token_to_try = self.report_token_issue()
            if not self.token_to_try:
                return resp
            return self.send_request(*args, **kwargs)
        elif resp.status_code == 401:
            # We got a 401 and this is a retry. Report error
            self.report_token_issue()
            return resp
        else:
            return resp

    @property
    def _cachefile(self):
        """Property to get the cache file name for the current client.

        This assures that whenever this file is touched, the cache lock is
        held.
        """
        assert self._cache_lock.locked()
        return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id)

    def __refresh_cache(self):
        """Refreshes the self._cache from the cache on disk.

        Requires cache_lock to be held by caller."""
        assert self._cache_lock.locked()
        self.debug('Refreshing cache')
        if not os.path.isdir(self.cachedir):
            self.debug('Creating directory')
            os.makedirs(self.cachedir)
        if not os.path.exists(self._cachefile):
            self.debug('Creating file')
            with open(self._cachefile, 'w') as f:
                f.write(json.dumps({}))
        with open(self._cachefile, 'r') as f:
            self._cache = json.loads(f.read())
        self.debug('Loaded %i tokens', len(self._cache))

    def _refresh_cache(self):
        """Refreshes the self._cache from the cache on disk.

        cache_lock may not be held by anyone."""
        with self._cache_lock:
            self.__refresh_cache()

    def __write_cache(self):
        """Writes self._cache to cache on disk.

        Requires cache_lock to be held by caller."""
        assert self._cache_lock.locked()
        self.debug('Writing cache with %i tokens', len(self._cache))
        with open(self._cachefile, 'w') as f:
            f.write(json.dumps(self._cache))

    def _add_token(self, token):
        """Adds a token to the cache and writes cache to disk.

        cache_lock may not be held by anyone.

        :param token: Dict of the token to be added to the cache
        """
        uuid = uuidgen().hex
        self.debug('Adding token %s to cache', uuid)
        with self._cache_lock:
            self.__refresh_cache()
            self._cache[uuid] = token
            self.__write_cache()
        return uuid

    def _update_token(self, uuid, toupdate):
        """Updates a token in the cache.

        cache_lock may not be held by anyone.

        :param uuid: UUID of the token to be updated
        :param toupdate: Dict indicating which fields need to be updated
        """
        self.debug('Updating token %s in cache, fields %s',
                   uuid, toupdate.keys())
        with self._cache_lock:
            self.__refresh_cache()
            if uuid not in self._cache:
                return None
            self._cache[uuid].update(toupdate)
            self.__write_cache()
        return uuid

    def _delete_token(self, uuid):
        """Removes a token from the cache and writes cache to disk.

        cache_lock may not be held by anyone.

        :param uuid: UUID of the token to be removed from cache
        """
        self.debug('Removing token %s from cache', uuid)
        with self._cache_lock:
            self.__refresh_cache()
            if uuid in self._cache:
                self.debug('Removing token')
                del self._cache[uuid]
                self.__write_cache()
            else:
                self.debug('Token was already gone')

    def _get_token_with_scopes(self, scopes):
        """Searches the cache for any tokens that have the requested scopes.

        It will prefer to return tokens whose expires_at is still after the
        current time, but if no such tokens exist it will return the possibly
        expired token: it might be refreshable.

        :param scopes: List of scopes that need to be in the returned token
        :rtype: (string, dict) or None
        :returns: Token UUID and contents or None if no applicable tokens
            were found
        """
        possible_token = None
        self.debug('Trying to get token with scopes %s', scopes)
        for uuid in self._cache:
            self.debug('Checking %s', uuid)
            token = self._cache[uuid]
            if token['idp'] != self.idp:
                self.debug('Incorrect idp')
                continue
            if not set(scopes).issubset(set(token['scopes'])):
                self.debug('Missing scope: %s not subset of %s',
                           set(scopes), set(token['scopes']))
                continue
            if token['expires_at'] > time.time():
                # This is a token that's supposed to still be valid, prefer it
                # over any others we have
                self.debug('Not yet expired, returning')
                return uuid, token
            # This is a token that may or may not still be valid
            self.debug('Possible')
            possible_token = (uuid, token)
        if possible_token:
            self.debug('Returning possible token')
            return possible_token

    def _idp_url(self, method):
        """Returns the IdP URL for the requested method.

        :param method: The method name in the IdP mapping dict.
        :rtype: string
        :returns: The IdP URL
        """
        if method in self.idp_mapping:
            return self.idp + self.idp_mapping[method]
        else:
            raise ValueError('Idp Mapping did not include path for %s'
                             % method)

    def _refresh_token(self, uuid):
        """Tries to refresh a token and put the refreshed token in self._cache

        The caller is responsible for either removing the token if it could
        not be refreshed or saving the cache if renewal was successful.

        :param uuid: The UUID of the cached token to attempt to refresh.
        :rtype: bool
        :returns: True if the token was successfully refreshed, False
            otherwise
        """
        oldtoken = self._cache[uuid]
        self.debug('Refreshing token %s', uuid)
        data = {'client_id': self.client_id,
                'grant_type': 'refresh_token',
                'refresh_token': oldtoken['refresh_token']}
        if self.client_secret:
            data['client_secret'] = self.client_secret
        resp = requests.request(
            'POST',
            self._idp_url('Token'),
            data=data)
        resp.raise_for_status()
        resp = resp.json()
        if 'error' in resp:
            self.debug('Unable to refresh, error: %s', resp['error'])
            return False
        self._update_token(
            uuid,
            {'access_token': resp['access_token'],
             'token_type': resp['token_type'],
             'refresh_token': resp['refresh_token'],
             'expires_at': time.time() + resp['expires_in']})
        self.debug('Refreshed until %s', self._cache[uuid]['expires_at'])
        return True

    def _get_server(self, app):
        """This function returns a SimpleServer with an available WEB_PORT."""
        for port in WEB_PORTS:
            try:
                server = simple_server.make_server('0.0.0.0', port, app)
                return server
            except socket.error:
                # This port did not work. Switch to next one
                continue

    def _get_new_token(self, scopes):
        """This function kicks off some magic.

        We will start a new webserver on one of the WEB_PORTS, and then
        either show the user a URL, or if possible, kick off their browser.
        This URL will be the Authorization endpoint of the IdP with a request
        for our client_id to get a new token with the specified scopes.
        The webserver will then need to catch the return with either an
        Authorization Code (that we will exchange for an access token) or the
        cancellation message.
        This function will store the new token in the local cache, add it to
        the valid cache, and then return the UUID.
        If the user cancelled (or we got another error), we will return None.
        """
        def _token_app(environ, start_response):
            query = environ['QUERY_STRING']
            split = query.split('&')
            kv = dict([v.split('=', 1) for v in split])

            if 'error' in kv:
                self.debug('Error code returned: %s (%s)',
                           kv['error'], kv.get('error_description'))
                self._retrieved_code = False
            else:
                self._retrieved_code = kv['code']

            # Just return a message
            start_response('200 OK', [('Content-Type', 'text/plain')])
            return [u'You can close this window and return to the CLI'.encode('ascii')]

        self._retrieved_code = None
        server = self._get_server(_token_app)
        if not server:
            raise Exception('We were unable to instantiate a webserver')
        return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1]

        rquery = {}
        rquery['scope'] = ' '.join(scopes)
        rquery['response_type'] = 'code'
        rquery['client_id'] = self.client_id
        rquery['redirect_uri'] = return_uri
        rquery['response_mode'] = 'query'
        query = urlencode(rquery)
        authz_url = '%s?%s' % (self._idp_url('Authorization'), query)
        print('Please visit %s to grant authorization' % authz_url,
              file=self._printfd)
        webbrowser.open(authz_url)
        server.handle_request()
        server.server_close()

        assert self._retrieved_code is not None
        if self._retrieved_code is False:
            # The user cancelled the request
            self._retrieved_code = None
            self.debug('User cancelled')
            return None

        self.debug('We got an authorization code!')
        data = {'client_id': self.client_id,
                'grant_type': 'authorization_code',
                'redirect_uri': return_uri,
                'code': self._retrieved_code}
        if self.client_secret:
            data['client_secret'] = self.client_secret

        resp = requests.request(
            'POST',
            self._idp_url('Token'),
            data=data)
        resp.raise_for_status()
        self._retrieved_code = None
        resp = resp.json()
        if 'error' in resp:
            self.debug('Error exchanging authorization code: %s',
                       resp['error'])
            return None

        token = {'access_token': resp['access_token'],
                 'refresh_token': resp['refresh_token'],
                 'expires_at': time.time() + int(resp['expires_in']),
                 'idp': self.idp,
                 'token_type': resp['token_type'],
                 'scopes': scopes}
        # AND WE ARE DONE! \o/
        return self._add_token(token)
If this does not exist and", "of the token to be updated :param toupdate: Dict indicating which fields need", "be updated \"\"\" self.debug('Updating token %s in cache, fields %s', uuid, toupdate.keys()) with", ":rtype: string :returns: The IdP URL \"\"\" if method in self.idp_mapping: return self.idp", "have self.debug('Not yet expired, returning') return uuid, token # This is a token", "Note that the bearer token might have been revoked by the user or", "scopes. The webserver will then need to catch the return with either an", "[('Content-Type', 'text/plain')]) return [u'You can close this window and return to the CLI'.encode('ascii')]", "self.debug('Refreshing cache') if not os.path.isdir(self.cachedir): self.debug('Creating directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating file')", "self._cache_lock = Lock() with self._cache_lock: self.__refresh_cache() self._valid_cache = [] self._printfd = printfd def", "if it could not be refreshed or saving the cache if renewal was", "This will attempt to renew the token that was last returned. If that", "possible or None \"\"\" if not isinstance(scopes, list): raise ValueError('Scopes must be a", "self.problem_reported = False return token[1]['access_token'] elif not new_token: return None # We did", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "self.send_request(*args, **kwargs) elif resp.status_code == 401: # We got a 401 and this", "token :rtype: (string, dict) or None :returns: Token UUID and contents or None", "uuid) with self._cache_lock: self.__refresh_cache() if uuid in self._cache: self.debug('Removing token') del self._cache[uuid] self.__write_cache()", "assures that whenever this file is touched, the cache lock is held \"\"\"", "to attempt to refresh. :rtype: bool :returns: True if the token was succesfully", "urlencode(rquery) authz_url = '%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s to grant authorization'", "distribute, sublicense, and/or sell # copies of the Software, and to permit persons", "printfd: The File object to print token instructions to. \"\"\" self.logger = logging.getLogger(__name__)", "that may or may not still be valid self.debug('Possible') possible_token = (uuid, token)", "the IdP. :kwarg use_post: Whether to use POST submission of client secrets rather", "if uuid: self.last_returned_uuid = uuid self.problem_reported = False return self._cache[uuid]['access_token'] def report_token_issue(self): \"\"\"Report", "endpoint of the IdP with a request for our client_id to get a", "= printfd def get_token(self, scopes, new_token=True): \"\"\"Function to retrieve tokens with specific scopes.", ":param scopes: List of scopes that need to be in the returned token", "'error' in kv: self.debug('Error code returned: %s (%s)', kv['error'], kv.get('error_description')) self._retrieved_code = False", "Requires cache_lock to be held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Writing cache with %i", "Connect for authentication.\"\"\" from __future__ import print_function from copy import copy import json", "writes cache to disk. cache_lock may not be held by anyone. :param token:", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "account or token was lacking specific permissions. 
\"\"\" if not self.last_returned_uuid: raise Exception('Cannot", "is_retry: if not auto_refresh: return resp self.token_to_try = self.report_token_issue() if not self.token_to_try: return", "with URLs to use for specific endpoints on the IdP. :kwarg use_post: Whether", "uuid, toupdate.keys()) with self._cache_lock: self.__refresh_cache() if uuid not in self._cache: return None self._cache[uuid].update(toupdate)", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "we will return it. This function will return a bearer token or None.", "if self.use_post: if 'json' in ckwargs: raise ValueError('Cannot provide json in a post", "that have the requested scopes. It will prefer to return tokens whose expires_at", "grant authorization' % authz_url, file=self._printfd) webbrowser.open(authz_url) server.handle_request() server.server_close() assert self._retrieved_code is not None", "OSError will be thrown. :kwargs printfd: The File object to print token instructions", "to disk. cache_lock may not be held by anyone. :param token: Dict of", "\"\"\"Refreshes the self._cache from the cache on disk. cache_lock may not be held", "Connect. :param app_identifier: Identifier for storage of retrieved tokens :param id_provider: URL of", "def _add_token(self, token): \"\"\"Adds a token to the cache and writes cache to", "it will return the possibly expired token: it might be refreshable. :param scopes:", "%i tokens', len(self._cache)) with open(self._cachefile, 'w') as f: f.write(json.dumps(self._cache)) def _add_token(self, token): \"\"\"Adds", "ckwargs['data'] = {} ckwargs['data']['access_token'] = token else: if 'headers' not in ckwargs: ckwargs['headers']", "specific endpoints on the IdP. :kwarg use_post: Whether to use POST submission of", "not still be valid self.debug('Possible') possible_token = (uuid, token) if possible_token: self.debug('Returning possible", "None self.problem_reported = False self.token_to_try = None self._retrieved_code = None # TODO: Make", "touched, the cache lock is held \"\"\" assert self._cache_lock.locked() return os.path.join(self.cachedir, 'oidc_%s.json' %", ":returns: Token UUID and contents or None if no applicable tokens were found", "app_identifier that has the required scopes, we will return it. This function will", "not isinstance(scopes, list): raise ValueError('Scopes must be a list') token = self._get_token_with_scopes(scopes) if", "portions of the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "the returned token :rtype: (string, dict) or None :returns: Token UUID and contents", "return self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs): \"\"\"Make an python-requests POST request. Allarguments and", "do so, subject to the following conditions: # # The above copyright notice", "until %s', self._cache[uuid]['expires_at']) return True def _get_server(self, app): \"\"\"This function returns a SimpleServer", "might be refreshable. :param scopes: List of scopes that need to be in", "expired token: it might be refreshable. 
:param scopes: List of scopes that need", "port in WEB_PORTS: try: server = simple_server.make_server('0.0.0.0', port, app) return server except socket.error:", "if the expires_at # indicates the token should still be valid, it may", "lock is held \"\"\" assert self._cache_lock.locked() return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id) def __refresh_cache(self):", "not subset of %s', set(scopes), set(token['scopes'])) continue if token['expires_at'] < time.time(): # This", "ckwargs['data']['access_token'] = token else: if 'headers' not in ckwargs: ckwargs['headers'] = {} ckwargs['headers']['Authorization']", "None and remove this token from the cache. If you get an indication", "permit persons to whom the Software is # furnished to do so, subject", "= app_identifier self.use_post = use_post self.idp = id_provider self.idp_mapping = id_provider_mapping self.client_id =", "was last returned. If that worked, we will return the new access token.", "held by caller.\"\"\" assert self._cache_lock.locked() self.debug('Writing cache with %i tokens', len(self._cache)) with open(self._cachefile,", "tokens that have the requested scopes. It will prefer to return tokens whose", "did not work. Switch to next one continue def _get_new_token(self, scopes): \"\"\"This function", "# TODO: Make cache_lock a filesystem lock so we also lock across #", "(uuid, token) if possible_token: self.debug('Returning possible token') return possible_token def _idp_url(self, method): \"\"\"Returns", "Red Hat, Inc. # Red Hat Author: <NAME> <<EMAIL>> # # Permission is", "a 401 and this is a retry. Report error self.report_token_issue() return resp else:", "If True, we will actively request the user to get a new token", "split]) if 'error' in kv: self.debug('Error code returned: %s (%s)', kv['error'], kv.get('error_description')) self._retrieved_code", "# refresh_token: The token we can use to refresh the access token #", "expired, returning') return uuid, token # This is a token that may or", "= self.get_token(scopes, new_token=new_token) if not token: return None if self.use_post: if 'json' in", "self.__write_cache() return uuid def _delete_token(self, uuid): \"\"\"Removes a token from the cache and", "= self._cache[uuid] self.debug('Refreshing token %s', uuid) data = {'client_id': self.client_id, 'grant_type': 'refresh_token', 'refresh_token':", "uuid) token = self._cache[uuid] if token['idp'] != self.idp: self.debug('Incorrect idp') continue if not", "will return None. \"\"\" def _token_app(environ, start_response): query = environ['QUERY_STRING'] split = query.split('&')", "import StringIO import socket import os try: from urllib import urlencode except ImportError:", "cache_lock may not be held by anyone. :param token: Dict of the token", "directory') os.makedirs(self.cachedir) if not os.path.exists(self._cachefile): self.debug('Creating file') with open(self._cachefile, 'w') as f: f.write(json.dumps({}))", "scopes. This function will block until a token is retrieved if requested. It", "must be a list') token = self._get_token_with_scopes(scopes) if token: # If we had", "_get_new_token(self, scopes): \"\"\"This function kicks off some magic. We will start a new", "'~/.openidc') self.last_returned_uuid = None self.problem_reported = False self.token_to_try = None self._retrieved_code = None", "will exchange for an access token) or the cancellation message. This function will", "try to renew the token or delete the token. 
:kwarg scopes: A list", "\"\"\" possible_token = None self.debug('Trying to get token with scopes %s', scopes) for", "subset of %s', set(scopes), set(token['scopes'])) continue if token['expires_at'] < time.time(): # This is", "retrieved if requested. It is always safe to call this though, since if", "actively request the user to get a new token with the current scopeset", "not refresh_result: self._delete_token(self.last_returned_uuid) return None else: self.problem_reported = True return self._cache[self.last_returned_uuid]['access_token'] def send_request(self,", "by anyone. :param uuid: UUID of the token to be removed from cache", "The File object to print token instructions to. \"\"\" self.logger = logging.getLogger(__name__) self.debug", "Identifier for storage of retrieved tokens :param id_provider: URL of the identity provider", "attempt to renew the token that was last returned. If that worked, we", "refreshable. :param scopes: List of scopes that need to be in the returned", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS", "True token = self.token_to_try self.token_to_try = None else: token = self.get_token(scopes, new_token=new_token) if", "as f: f.write(json.dumps(self._cache)) def _add_token(self, token): \"\"\"Adds a token to the cache and", "*args, **ckwargs) if resp.status_code == 401 and not is_retry: if not auto_refresh: return", "= '%s?%s' % (self._idp_url('Authorization'), query) print('Please visit %s to grant authorization' % authz_url,", "that may send a 401 return code in incorrect cases. :kwargs http_method: The", "scopeset if we do not already have on. :rtype: string or None :returns:", "available WEB_PORT.\"\"\" for port in WEB_PORTS: try: server = simple_server.make_server('0.0.0.0', port, app) return", "the refreshed token in self._cache The caller is responsible for either removing the", "was returned. This will attempt to renew the token that was last returned.", "indication from your application that the token you sent was invalid, you should", "does not require you to use a secret. :kwarg useragent: Useragent string to", "It will prefer to return tokens whose expires_at is still before the current", "ImportError: from io import StringIO import socket import os try: from urllib import", "copy import copy import json import logging from threading import Lock import time", "[12345, 23456] class OpenIDCClient(object): # Internal implementation of tokens: # Every app id", "self._cache_lock.locked() return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id) def __refresh_cache(self): \"\"\"Refreshes the self._cache from the", "to be updated \"\"\" self.debug('Updating token %s in cache, fields %s', uuid, toupdate.keys())", "IdP mapping dict. :rtype: string :returns: The IdP URL \"\"\" if method in", ":kwarg cachedir: The directory in which to store the token caches. Will be", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "error: %s', resp['error']) return False self._update_token( uuid, {'access_token': resp['access_token'], 'token_type': resp['token_type'], 'refresh_token': resp['refresh_token'],", "next one continue def _get_new_token(self, scopes): \"\"\"This function kicks off some magic. We", "we will actively request the user to get a new token with the", "client_id to get a new token with the specified scopes. The webserver will", "might have been revoked by the user or expired. 
In that case, you", "self._delete_token(self.last_returned_uuid) return None else: self.problem_reported = True return self._cache[self.last_returned_uuid]['access_token'] def send_request(self, *args, **kwargs):", "was succesful. :param uuid: The UUID of the cached token to attempt to", "not in ['POST']: raise ValueError('Cannot use POST tokens in %s method' % method)", "either show the user a URL, or if possible, kick off their browser.", "self._delete_token(self.last_returned_uuid) return None refresh_result = self._refresh_token(self.last_returned_uuid) if not refresh_result: self._delete_token(self.last_returned_uuid) return None else:", "self._valid_cache = [] self._printfd = printfd def get_token(self, scopes, new_token=True): \"\"\"Function to retrieve", "kick off their browser. This URL will be the Authorization endpoint of the", "= ckwargs.pop('auto_refresh', True) method = ckwargs.pop('http_method', 'POST') is_retry = False if self.token_to_try: is_retry", "following keys: # idp: The URL of the idp that issued the token", "Whether to use POST submission of client secrets rather than Authorization header :kwarg", "self.debug('Possible') possible_token = (uuid, token) if possible_token: self.debug('Returning possible token') return possible_token def", "for %s' % method) def _refresh_token(self, uuid): \"\"\"Tries to refresh a token and", "token['expires_at'] < time.time(): # This is a token that's supposed to still be", "requested. It is always safe to call this though, since if we already", "+ int(resp['expires_in']), 'idp': self.idp, 'token_type': resp['token_type'], 'scopes': scopes} # AND WE ARE DONE!", "# This is a token that's supposed to still be valid, prefer it", "query.split('&') kv = dict([v.split('=', 1) for v in split]) if 'error' in kv:", "current client. :kwarg new_token: If True, we will actively request the user to", "got a 401 and this is a retry. Report error self.report_token_issue() return resp", "token: Dict of the token to be added to the cache \"\"\" uuid", "still before the current time, but if no such tokens exist it will", "a message start_response('200 OK', [('Content-Type', 'text/plain')]) return [u'You can close this window and", "specified scopes. The webserver will then need to catch the return with either", "ValueError('Cannot provide json in a post call') if method not in ['POST']: raise", "or None. Note that the bearer token might have been revoked by the", "URL for the requested method. :param method: The method name in the IdP", "= None self._retrieved_code = None # TODO: Make cache_lock a filesystem lock so", "token issues on 401. This helps with broken apps that may send a", "on OpenID Connect for authentication.\"\"\" from __future__ import print_function from copy import copy", "'grant_type': 'authorization_code', 'redirect_uri': return_uri, 'code': self._retrieved_code} if self.client_secret: data['client_secret'] = self.client_secret resp =", "return a message start_response('200 OK', [('Content-Type', 'text/plain')]) return [u'You can close this window", "from StringIO import StringIO except ImportError: from io import StringIO import socket import", "resp = resp.json() if 'error' in resp: self.debug('Unable to refresh, error: %s', resp['error'])", "token is retrieved if requested. 
It is always safe to call this though,", "already gone') def _get_token_with_scopes(self, scopes): \"\"\"Searches the cache for any tokens that have", "token = {'access_token': resp['access_token'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time() + int(resp['expires_in']), 'idp': self.idp, 'token_type':", "_update_token(self, uuid, toupdate): \"\"\"Updates a token in the cache. cache_lock may not be", "unable to create it, the OSError will be thrown. :kwargs printfd: The File", "tokens', len(self._cache)) def _refresh_cache(self): \"\"\"Refreshes the self._cache from the cache on disk. cache_lock", "we will return the new access token. If it did not work, we", "hereby granted, free of charge, to any person obtaining a copy # of", "requesting token') if self.problem_reported: # We were reported an issue before. Let's just", "This function will store the new token in the local cache, add it", "not token: return None if self.use_post: if 'json' in ckwargs: raise ValueError('Cannot provide", "issue before. Let's just remove this token. self._delete_token(self.last_returned_uuid) return None refresh_result = self._refresh_token(self.last_returned_uuid)", "OpenID Connect. :param app_identifier: Identifier for storage of retrieved tokens :param id_provider: URL", "we will return None and remove this token from the cache. If you", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. \"\"\"Client for", "_idp_url(self, method): \"\"\"Returns the IdP URL for the requested method. :param method: The", "because the account or token was lacking specific permissions. \"\"\" if not self.last_returned_uuid:", "token in the cache. cache_lock may not be held by anyone. :param token:", "{} rquery['scope'] = ' '.join(scopes) rquery['response_type'] = 'code' rquery['client_id'] = self.client_id rquery['redirect_uri'] =", "self._cache[uuid] = token self.__write_cache() return uuid def _update_token(self, uuid, toupdate): \"\"\"Updates a token", "print_function from copy import copy import json import logging from threading import Lock", "uuid, toupdate): \"\"\"Updates a token in the cache. cache_lock may not be held", "return to the CLI'.encode('ascii')] self._retrieved_code = None server = self._get_server(_token_app) if not server:", "Make cache_lock a filesystem lock so we also lock across # multiple invocations", "authz_url, file=self._printfd) webbrowser.open(authz_url) server.handle_request() server.server_close() assert self._retrieved_code is not None if self._retrieved_code is", "code: %s', resp['error']) return None token = {'access_token': resp['access_token'], 'refresh_token': resp['refresh_token'], 'expires_at': time.time()", "False: # The user cancelled the request self._retrieved_code = None self.debug('User cancelled') return" ]
[ "and this permission notice shall be included in all # copies of this", "elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else", "or title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #-------------------------------------------------------------------------------", "#------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem = NIL(\"LiteralData\" if is_input else", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "= avobj elif isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges", "the Software is # furnished to do so, subject to the following conditions:", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection,", "'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements", "the Software without restriction, including without limitation the rights # to use, copy,", "not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "= (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm,", "= None, [], [] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return", "person obtaining a copy # of this software and associated documentation files (the", "None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum, ranges, elist = None, [],", "_encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding))", "attrib['schema'] = outdef.schema if outdef.as_reference is not None: attrib['asReference'] = 'true' if outdef.as_reference", "ranges = avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed value object! OBJ=%r\"%avobj) dtype =", "if outdef.crs is not None: attrib['crs'] = outdef.crs if outdef.mime_type is not None:", "this permission notice shall be included in all # copies of this Software", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not None:", "EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright", "!= 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval)))", "return elem def encode_output_descr(prm): \"\"\" Encode process description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm))", "dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v))", "= {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is", "#------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\",", "LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import (", "None: attrib['crs'] = outdef.crs if outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if", "WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum = avobj", "of this Software or works derived from this Software. # # THE SOFTWARE", "enum.values) for range_ in ranges: attr, elms = {}, [] if range_.closure !=", "[] if range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not", "is hereby granted, free of charge, to any person obtaining a copy #", "<NAME> <<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013 EOX IT", "None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input):", "persons to whom the Software is # furnished to do so, subject to", "*[OWS(\"UOM\", u) for u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is", "outdef.uom is not None: attrib['uom'] = outdef.uom if outdef.crs is not None: attrib['crs']", "conditions: # # The above copyright notice and this permission notice shall be", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "ddtype = dtype.get_diff_dtype() if enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in", "avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange):", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE", "attrib['encoding'] = outdef.encoding if outdef.schema is not None: attrib['schema'] = outdef.schema if outdef.as_reference", "def _encode_literal(prm, is_input): dtype = prm.dtype elem = NIL(\"LiteralData\" if is_input else \"LiteralOutput\")", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "documentation files (the \"Software\"), to deal # in the Software without restriction, including", "elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif", "_encode_literal(prm, is_input): dtype = prm.dtype elem = NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\",", "# THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum,", "isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj,", "to permit persons to whom the Software is # furnished to do so,", "EOX IT Services GmbH # # Permission is hereby granted, free of charge,", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def", "BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\" Encode common part of the", "prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum, ranges, elist", "isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm,", "*_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode common part of the execure response data", "in ranges: attr, elms = {}, [] if range_.closure != 'closed': attr =", "of charge, to any person obtaining a copy # of this software and", "be included in all # copies of this Software or works derived from", "range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing)))", "elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not", "**attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else", ") def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None:", "True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\" Encode process", "elem def _encode_allowed_value(avobj): enum, ranges, elist = None, [], [] if isinstance(avobj, AllowedAny):", "False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem", "encode_input_exec(prm): \"\"\" Encode common part of the execure 
response data input.\"\"\" return WPS(\"Input\",", "so, subject to the following conditions: # # The above copyright notice and", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #-------------------------------------------------------------------------------", "#------------------------------------------------------------------------------- # # WPS 1.0 parameters' XML encoders # # Project: EOxServer <http://eoxserver.org>", "outdef.encoding if outdef.schema is not None: attrib['schema'] = outdef.schema if outdef.as_reference is not", "def encode_input_descr(prm): \"\"\" Encode process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] =", "attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval", "outdef.encoding is not None: attrib['encoding'] = outdef.encoding if outdef.schema is not None: attrib['schema']", "*elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input", "elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if", "if range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None:", "encoders # # Project: EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>>", "GmbH # # Permission is hereby granted, free of charge, to any person", "is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def", "copy # of this software and associated documentation files (the \"Software\"), to deal", "data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode common part of", "\"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange): ranges", "common sub-elements of all XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title or", "_encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of all XML parameters.\"\"\" elist = [OWS(\"Identifier\",", "\"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u", "to the following conditions: # # The above copyright notice and this permission", "def encode_output_def(outdef): \"\"\" Encode the execure response output definition.\"\"\" attrib = {} if", "False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of all XML parameters.\"\"\"", "execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure", "Encode common sub-elements of all XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title", "prm.dtype elem = NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ 
ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name,", "for range_ in ranges: attr, elms = {}, [] if range_.closure != 'closed':", "attr, elms = {}, [] if range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure}", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. #-------------------------------------------------------------------------------", "is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum, ranges, elist =", "( OWS, WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode process description", "#------------------------------------------------------------------------------- # Copyright (C) 2013 EOX IT Services GmbH # # Permission is", "dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\",", "and associated documentation files (the \"Software\"), to deal # in the Software without", "dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\",", "encode_input_descr(prm): \"\"\" Encode process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\",", "prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in prm.uoms]) ))", "of all XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title or title_required: elist.append(OWS(\"Title\",", "elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem = NIL(\"LiteralData\" if is_input", "frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema))", "if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\",", "})) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in", "eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from", "from eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\"", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "= NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData):", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure response output", "sublicense, and/or sell # copies of the Software, and to permit persons to", "Software is # furnished to do so, subject to the following conditions: #", "outdef.mime_type if outdef.encoding is not None: 
attrib['encoding'] = outdef.encoding if outdef.schema is not", "if range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\",", "isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed value object!", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\" Encode process description output.\"\"\" elem =", "_encode_allowed_value(avobj): enum, ranges, elist = None, [], [] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\")", "parameters' XML encoders # # Project: EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> #", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "is not None: attrib['crs'] = outdef.crs if outdef.mime_type is not None: attrib['mimeType'] =", "= outdef.encoding if outdef.schema is not None: attrib['schema'] = outdef.schema if outdef.as_reference is", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def", "elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\",", "\"\"\" Encode common part of the execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm))", "elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem =", "NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is", "[], [] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{", "elem def encode_output_descr(prm): \"\"\" Encode process description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if", "response output definition.\"\"\" attrib = {} if outdef.uom is not None: attrib['uom'] =", "2013 EOX IT Services GmbH # # Permission is hereby granted, free of", "<http://eoxserver.org> # Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C)", "NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm,", "# copies of the Software, and to permit persons to whom the Software", "None: attrib['uom'] = outdef.uom if outdef.crs is not None: attrib['crs'] = outdef.crs if", "str(prm.default))) return elem def _encode_allowed_value(avobj): enum, ranges, elist = None, [], [] if", "is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\"", "in all # copies of this Software or works derived from this Software.", "attrib['asReference'] = 'true' if outdef.as_reference else 'false' return WPS(\"Output\", 
*_encode_param_common(outdef, False), **attrib) def", "[OWS(\"Identifier\", prm.identifier)] if prm.title or title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\",", "frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\",", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm,", "not None: attrib['crs'] = outdef.crs if outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type", "elms = {}, [] if range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if", "elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return", "NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()])", "if outdef.uom is not None: attrib['uom'] = outdef.uom if outdef.crs is not None:", ") #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm))", "execure response output definition.\"\"\" attrib = {} if outdef.uom is not None: attrib['uom']", "*_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True))", "elem def encode_input_exec(prm): \"\"\" Encode common part of the execure response data input.\"\"\"", "THE USE OR OTHER DEALINGS IN # THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import", "def encode_input_exec(prm): \"\"\" Encode common part of the execure response data input.\"\"\" return", "prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return", "is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None: attrib['encoding'] =", "not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing", "eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode", "copies of this Software or works derived from this Software. # # THE", "outdef.schema if outdef.as_reference is not None: attrib['asReference'] = 'true' if outdef.as_reference else 'false'", "= prm.dtype elem = NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"):", "# WPS 1.0 parameters' XML encoders # # Project: EOxServer <http://eoxserver.org> # Authors:", "NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "dtype.get_diff_dtype() if enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for", "Encode common part of the execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def", "software and associated documentation files (the \"Software\"), to deal # in the Software", "is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema)) return", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for crs in", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return", "isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm):", "else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode common", "{}, [] if range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is", "not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms,", "f in prm.formats.itervalues()]) ) def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding", "isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\" Encode process description output.\"\"\"", "and to permit persons to whom the Software is # furnished to do", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the", "prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if", "the following conditions: # # The above copyright notice and this permission notice", "= 'true' if outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm,", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "# furnished to do so, subject to the following conditions: # # The", "avobj elif isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges =", "the Software, and to permit persons to whom the Software is # furnished", "shall be included in all # copies of this Software or works derived", "return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, })", "rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "# copies of this Software or works derived from this Software. 
# #", "\"\"\" Encode common sub-elements of all XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if", "not None: attrib['asReference'] = 'true' if outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False),", "attrib = {} if outdef.uom is not None: attrib['uom'] = outdef.uom if outdef.crs", "# Project: EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # #-------------------------------------------------------------------------------", "_encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) ) def _encode_format(frmt): elem = NIL(\"Format\",", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "None: attrib['schema'] = outdef.schema if outdef.as_reference is not None: attrib['asReference'] = 'true' if", "merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to", "for u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None:", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING", "from this Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "OR OTHER DEALINGS IN # THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData,", "OWS, WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode process description input.\"\"\"", "elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not None:", "to do so, subject to the following conditions: # # The above copyright", "elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm,", "IT Services GmbH # # Permission is hereby granted, free of charge, to", "None, [], [] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\",", "common part of the execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def", "or works derived from this Software. # # THE SOFTWARE IS PROVIDED \"AS", "is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if", "dtype.encode(v)) for v in enum.values) for range_ in ranges: attr, elms = {},", "elif isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed value", "NIL(\"Supported\", *[OWS(\"UOM\", u) for u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default", "<NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013 EOX IT Services GmbH #", "else: raise TypeError(\"Invalid allowed value object! 
OBJ=%r\"%avobj) dtype = avobj.dtype ddtype = dtype.get_diff_dtype()", "the execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the", "ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for", "is not None: attrib['uom'] = outdef.uom if outdef.crs is not None: attrib['crs'] =", "= \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif", "elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\" Encode", "whom the Software is # furnished to do so, subject to the following", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "allowed value object! OBJ=%r\"%avobj) dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if enum is", "all XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title or title_required: elist.append(OWS(\"Title\", prm.title", "outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None: attrib['encoding']", "free of charge, to any person obtaining a copy # of this software", "return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\",", "elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\" Encode", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software,", "elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\" Encode common part of the execure", "if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype", "USE OR OTHER DEALINGS IN # THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import (", "return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum =", "ranges, elist = None, [], [] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj,", "OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
#------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters", "elist = [OWS(\"Identifier\", prm.identifier)] if prm.title or title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier)) if", "None: attrib['encoding'] = outdef.encoding if outdef.schema is not None: attrib['schema'] = outdef.schema if", "without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense,", "not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for range_ in ranges: attr,", "NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif", "is # furnished to do so, subject to the following conditions: # #", "avobj.ranges else: raise TypeError(\"Invalid allowed value object! OBJ=%r\"%avobj) dtype = avobj.dtype ddtype =", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "<<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013 EOX IT Services", "if outdef.schema is not None: attrib['schema'] = outdef.schema if outdef.as_reference is not None:", "encode_output_def(outdef): \"\"\" Encode the execure response output definition.\"\"\" attrib = {} if outdef.uom", "if outdef.encoding is not None: attrib['encoding'] = outdef.encoding if outdef.schema is not None:", "to deal # in the Software without restriction, including without limitation the rights", "to any person obtaining a copy # of this software and associated documentation", "elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\" Encode process description", "= avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed value object! OBJ=%r\"%avobj) dtype = avobj.dtype", "NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms:", "None: attrib['asReference'] = 'true' if outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib)", "title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def", "input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for crs in prm.crss])", "ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import ( OWS,", "= avobj.dtype ddtype = dtype.get_diff_dtype() if enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\",", "is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "works derived from this Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem", "elif isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum,", "= outdef.crs if outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is", "return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\",", "= outdef.mime_type if outdef.encoding is not None: attrib['encoding'] = outdef.encoding if outdef.schema is", "= {}, [] if range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval", "Software, and to permit persons to whom the Software is # furnished to", "None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None: attrib['encoding'] = outdef.encoding if", "part of the execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm):", "AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows, ) #-------------------------------------------------------------------------------", "\"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) ) def _encode_format(frmt): elem", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE.", "*_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm,", "this software and associated documentation files (the \"Software\"), to deal # in the", "title_required=True): \"\"\" Encode common sub-elements of all XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)]", "if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum, ranges,", "if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not None: elem.append(NIL(\"Schema\",", "1.0 parameters' XML encoders # # Project: EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>>", "ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj,", "this Software. 
# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "in enum.values) for range_ in ranges: attr, elms = {}, [] if range_.closure", "execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode common", "avobj.url, }) elif isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange): ranges =", "if prm.title or title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "NIL, ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode process description input.\"\"\" elem =", "granted, free of charge, to any person obtaining a copy # of this", "response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure response", "{ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not", "BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\" Encode process description output.\"\"\" elem", "outdef.schema is not None: attrib['schema'] = outdef.schema if outdef.as_reference is not None: attrib['asReference']", "def encode_output_exec(prm): \"\"\" Encode common part of the execure response data output.\"\"\" return", "= outdef.uom if outdef.crs is not None: attrib['crs'] = outdef.crs if outdef.mime_type is", "furnished to do so, subject to the following conditions: # # The above", "modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "= NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if", "prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype =", "# Permission is hereby granted, free of charge, to any person obtaining a", "enum, ranges, elist = None, [], [] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif", "v in enum.values) for range_ in ranges: attr, elms = {}, [] if", "definition.\"\"\" attrib = {} if outdef.uom is not None: attrib['uom'] = outdef.uom if", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013 EOX", "# # WPS 1.0 parameters' XML encoders # # Project: EOxServer <http://eoxserver.org> #", "description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\"", "WPS 1.0 parameters' XML encoders # # Project: EOxServer <http://eoxserver.org> # Authors: <NAME>", "None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return 
NIL(\"BoundingBoxData\" if is_input", "sub-elements of all XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title or title_required:", "publish, distribute, sublicense, and/or sell # copies of the Software, and to permit", "outdef.crs is not None: attrib['crs'] = outdef.crs if outdef.mime_type is not None: attrib['mimeType']", "frmt.encoding)) if frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm,", "else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for crs in prm.crss]) )", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "if outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\"", "elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def", "dtype.encode(range_.minval))) if range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None:", "if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist)", "True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem", "else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) ) def _encode_format(frmt):", "elist = None, [], [] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference):", "if outdef.as_reference is not None: attrib['asReference'] = 'true' if outdef.as_reference else 'false' return", "process description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif", "**attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of all XML parameters.\"\"\" elist", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "TypeError(\"Invalid allowed value object! 
OBJ=%r\"%avobj) dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if enum", ")) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem", "without restriction, including without limitation the rights # to use, copy, modify, merge,", "not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum, ranges, elist = None,", "in prm.formats.itervalues()]) ) def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is", "\"\"\" Encode process description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm,", "if outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None:", "LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True))", "NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) ) def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\",", "in the Software without restriction, including without limitation the rights # to use,", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\",", "copies of the Software, and to permit persons to whom the Software is", "OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values))", "**{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u)", "_encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\",", "frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return", "def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of all XML parameters.\"\"\" elist =", "ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges else: raise", "Project: EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- #", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm): \"\"\"", "def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\",", "= NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema", "= [OWS(\"Identifier\", prm.identifier)] if prm.title 
or title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract:", "if enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for range_", "WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of all", "is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) ) def", "notice shall be included in all # copies of this Software or works", "SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. #------------------------------------------------------------------------------- from", "#------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference,", "enum, ranges = avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed value object! OBJ=%r\"%avobj) dtype", "notice and this permission notice shall be included in all # copies of", "elem = NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, }))", "obtaining a copy # of this software and associated documentation files (the \"Software\"),", "part of the execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\"", "AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url,", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\":", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL", "for f in prm.formats.itervalues()]) ) def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if", "None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is", "*_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of all XML", "The above copyright notice and this permission notice shall be included in all", "and/or sell # copies of the Software, and to permit persons to whom", "prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input):", "False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\" Encode common", "input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode common part of the", "\"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])),", "enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for range_ in", "OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\",", "\"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm,", "outdef.crs if outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not", "# in the Software without restriction, including without limitation the rights # to", "avobj.dtype ddtype = dtype.get_diff_dtype() if enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v", "the execure response output definition.\"\"\" attrib = {} if outdef.uom is not None:", "( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import", "from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, )", "Encode the execure response output definition.\"\"\" attrib = {} if outdef.uom is not", "None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr))", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "}) elif isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange): ranges = [avobj]", "return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for", "*[_encode_format(f) for f in prm.formats.itervalues()]) ) def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type))", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN #", "# 
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "Copyright (C) 2013 EOX IT Services GmbH # # Permission is hereby granted,", "encode_output_exec(prm): \"\"\" Encode common part of the execure response data output.\"\"\" return WPS(\"Output\",", "elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\" Encode common part", "not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm,", "any person obtaining a copy # of this software and associated documentation files", "# # The above copyright notice and this permission notice shall be included", "not None: attrib['schema'] = outdef.schema if outdef.as_reference is not None: attrib['asReference'] = 'true'", "\"Software\"), to deal # in the Software without restriction, including without limitation the", "AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL,", "elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj,", "elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in prm.uoms]) )) if", "NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs)) for crs", "elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def", "*_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure response output definition.\"\"\" attrib = {}", "AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] =", "a copy # of this software and associated documentation files (the \"Software\"), to", "deal # in the Software without restriction, including without limitation the rights #", "elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm,", "outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode", "AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows, )", "DEALINGS IN # THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData,", "if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in prm.uoms])", "Software or works derived from this Software. 
# # THE SOFTWARE IS PROVIDED", "elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm,", "return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem = NIL(\"LiteralData\" if", "#------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"]", "(C) 2013 EOX IT Services GmbH # # Permission is hereby granted, free", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "(the \"Software\"), to deal # in the Software without restriction, including without limitation", "import ( OWS, WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode process", "elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum, ranges, elist = None, [], []", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "all # copies of this Software or works derived from this Software. #", "output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure response output definition.\"\"\"", "return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure response output definition.\"\"\" attrib", "distribute, sublicense, and/or sell # copies of the Software, and to permit persons", "import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util", "return elem def _encode_allowed_value(avobj): enum, ranges, elist = None, [], [] if isinstance(avobj,", "raise TypeError(\"Invalid allowed value object! OBJ=%r\"%avobj) dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if", "is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj):", "charge, to any person obtaining a copy # of this software and associated", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData):", "AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed value object! 
OBJ=%r\"%avobj)", "elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True))", "'true' if outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True):", "common part of the execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef):", "is not None: attrib['encoding'] = outdef.encoding if outdef.schema is not None: attrib['schema'] =", "enum = avobj elif isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum,", "range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #-------------------------------------------------------------------------------", "NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) ) def _encode_format(frmt): elem =", "\"\"\" Encode common part of the execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm,", "True)) return elem def encode_output_descr(prm): \"\"\" Encode process description output.\"\"\" elem = NIL(\"Output\",", "elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def _encode_allowed_value(avobj): enum,", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "is_input): dtype = prm.dtype elem = NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name,", "# Copyright (C) 2013 EOX IT Services GmbH # # Permission is hereby", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "to whom the Software is # furnished to do so, subject to the", "return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in", "limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData):", "Encode common part of the execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False))", "False)) return elem def encode_input_exec(prm): \"\"\" Encode common part of the execure response", "AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection):", "None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for range_ in ranges: attr, elms", "= outdef.schema if outdef.as_reference is not None: attrib['asReference'] = 'true' if outdef.as_reference else", "#------------------------------------------------------------------------------- def _encode_bbox(prm, is_input): return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))),", "= [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges = 
avobj.enum, avobj.ranges else: raise TypeError(\"Invalid", "elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return", "def _encode_allowed_value(avobj): enum, ranges, elist = None, [], [] if isinstance(avobj, AllowedAny): return", "do so, subject to the following conditions: # # The above copyright notice", "range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\",", "if frmt.schema is not None: elem.append(NIL(\"Schema\", frmt.schema)) return elem #------------------------------------------------------------------------------- def _encode_bbox(prm, is_input):", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange,", "avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed value object! OBJ=%r\"%avobj) dtype = avobj.dtype ddtype", "# #------------------------------------------------------------------------------- # Copyright (C) 2013 EOX IT Services GmbH # # Permission", "permit persons to whom the Software is # furnished to do so, subject", "Permission is hereby granted, free of charge, to any person obtaining a copy", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode the execure response output definition.\"\"\" attrib =", "prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem = NIL(\"LiteralData\"", "is not None: attrib['schema'] = outdef.schema if outdef.as_reference is not None: attrib['asReference'] =", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "Software without restriction, including without limitation the rights # to use, copy, modify,", "is not None: attrib['asReference'] = 'true' if outdef.as_reference else 'false' return WPS(\"Output\", *_encode_param_common(outdef,", "of the execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\"", "dtype = prm.dtype elem = NIL(\"LiteralData\" if is_input else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{", "# The above copyright notice and this permission notice shall be included in", "# of this software and associated documentation files (the \"Software\"), to deal #", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "above copyright notice and this permission notice shall be included in all #", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE", "object! 
OBJ=%r\"%avobj) dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if enum is not None:", "sell # copies of the Software, and to permit persons to whom the", "OBJ=%r\"%avobj) dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if enum is not None: elist.extend(OWS(\"Value\",", "elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for range_ in ranges: attr, elms =", "XML parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title or title_required: elist.append(OWS(\"Title\", prm.title or", "isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, True)) return elem def encode_output_descr(prm):", "elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return", "parameters.\"\"\" elist = [OWS(\"Identifier\", prm.identifier)] if prm.title or title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier))", "prm.title or title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist", "# # Project: EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> #", "<<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013 EOX IT Services GmbH # #", "range_ in ranges: attr, elms = {}, [] if range_.closure != 'closed': attr", "# <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013 EOX IT Services GmbH", "restriction, including without limitation the rights # to use, copy, modify, merge, publish,", "else \"LiteralOutput\") elem.append(OWS(\"DataType\", dtype.name, **{ ns_ows(\"reference\"): \"http://www.w3.org/TR/xmlschema-2/#%s\"%dtype.name, })) if prm.uoms: elem.append(NIL(\"UOMs\", NIL(\"Default\", OWS(\"UOM\",", "output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData):", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS", "return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode common part of the execure", "# # Permission is hereby granted, free of charge, to any person obtaining", "not None: attrib['encoding'] = outdef.encoding if outdef.schema is not None: attrib['schema'] = outdef.schema", "u) for u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not", "Encode process description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False))", "ranges: attr, elms = {}, [] if range_.closure != 'closed': attr = {ns_ows(\"rangeClosure\"):", "prm.identifier)] if prm.title or title_required: elist.append(OWS(\"Title\", prm.title or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract))", "ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\"", "outdef.as_reference is not None: attrib['asReference'] = 'true' if outdef.as_reference else 'false' return WPS(\"Output\",", "or prm.identifier)) if prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False))", "NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\", frmt.encoding)) if frmt.schema is not", "(\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData):", ") from eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm):", "def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not None: elem.append(NIL(\"Encoding\",", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum):", "for v in enum.values) for range_ in ranges: attr, elms = {}, []", "[avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges else: raise TypeError(\"Invalid allowed", "'closed': attr = {ns_ows(\"rangeClosure\"): range_.closure} if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if", "files (the \"Software\"), to deal # in the Software without restriction, including without", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "= {} if outdef.uom is not None: attrib['uom'] = outdef.uom if outdef.crs is", "not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None: attrib['encoding'] = outdef.encoding", "def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f)", "{} if outdef.uom is not None: attrib['uom'] = outdef.uom if outdef.crs is not", "isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm, False)) return elem def encode_input_exec(prm): \"\"\" Encode common part of", "AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum", "the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "BoundingBoxData, AllowedAny, AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import ( OWS, WPS,", "isinstance(avobj, AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges", "# Authors: <NAME> <<EMAIL>> # <NAME> <<EMAIL>> # #------------------------------------------------------------------------------- # Copyright (C) 2013", "following conditions: # # The above copyright notice and this permission notice shall", "of the Software, and to permit persons to whom the Software is #", "**{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif isinstance(avobj, AllowedEnum): enum = avobj elif", "if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for f in prm.formats.itervalues()]) )", "output definition.\"\"\" attrib = {} if outdef.uom is not None: attrib['uom'] = outdef.uom", "prm.abstract: elist.append(OWS(\"Abstract\", prm.abstract)) return elist #------------------------------------------------------------------------------- def _encode_literal(prm, is_input): dtype = prm.dtype elem", "ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode process description input.\"\"\" elem = NIL(\"Input\",", "Encode process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"]", "attrib['mimeType'] = outdef.mime_type if outdef.encoding is not None: attrib['encoding'] = outdef.encoding if outdef.schema", "this Software or works derived from this Software. 
# # THE SOFTWARE IS", "is_input): return NIL(\"BoundingBoxData\" if is_input else \"BoundingBoxOutput\", NIL(\"Default\", NIL(\"CRS\", prm.encode_crs(prm.default_crs))), NIL(\"Supported\", *[NIL(\"CRS\", prm.encode_crs(crs))", "AllowedRange): ranges = [avobj] elif isinstance(avobj, AllowedRangeCollection): enum, ranges = avobj.enum, avobj.ranges else:", "\"0\")[bool(prm.is_optional)] elem.attrib[\"maxOccurs\"] = \"1\" if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm,", "return elem def encode_input_exec(prm): \"\"\" Encode common part of the execure response data", "XML encoders # # Project: EOxServer <http://eoxserver.org> # Authors: <NAME> <<EMAIL>> # <NAME>", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "of the execure response data output.\"\"\" return WPS(\"Output\", *_encode_param_common(prm)) def encode_output_def(outdef): \"\"\" Encode", "[] if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"):", "\"\"\" Encode process description input.\"\"\" elem = NIL(\"Input\", *_encode_param_common(prm)) elem.attrib[\"minOccurs\"] = (\"1\", \"0\")[bool(prm.is_optional)]", "encode_output_descr(prm): \"\"\" Encode process description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData):", "AllowedEnum, AllowedRange, AllowedRangeCollection, AllowedByReference, ) from eoxserver.services.ows.wps.v10.util import ( OWS, WPS, NIL, ns_ows,", "IN # THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData, BoundingBoxData, AllowedAny,", "attrib['uom'] = outdef.uom if outdef.crs is not None: attrib['crs'] = outdef.crs if outdef.mime_type", "if isinstance(avobj, AllowedAny): return OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url,", "elif isinstance(avobj, AllowedEnum): enum = avobj elif isinstance(avobj, AllowedRange): ranges = [avobj] elif", "\"\"\" Encode the execure response output definition.\"\"\" attrib = {} if outdef.uom is", "response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode common part", "if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default))) return elem def", "outdef.uom if outdef.crs is not None: attrib['crs'] = outdef.crs if outdef.mime_type is not", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "including without limitation the rights # to use, copy, modify, merge, publish, distribute,", "if range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not None: elms.append(OWS(\"MaximumValue\",", "_encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)), NIL(\"Supported\", *[_encode_format(f) for", "Services GmbH # # Permission is hereby granted, free of charge, to any", "<reponame>constantinius/eoxserver_combined<filename>eoxserver/services/ows/wps/v10/encoders/parameters.py 
#------------------------------------------------------------------------------- # # WPS 1.0 parameters' XML encoders # # Project: EOxServer", "permission notice shall be included in all # copies of this Software or", "range_.minval is not None: elms.append(OWS(\"MinimumValue\", dtype.encode(range_.minval))) if range_.maxval is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval)))", "copyright notice and this permission notice shall be included in all # copies", "def encode_output_descr(prm): \"\"\" Encode process description output.\"\"\" elem = NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm,", "False)) def encode_output_exec(prm): \"\"\" Encode common part of the execure response data output.\"\"\"", "prm.formats.itervalues()]) ) def _encode_format(frmt): elem = NIL(\"Format\", NIL(\"MimeType\", frmt.mime_type)) if frmt.encoding is not", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None: elem.append(NIL(\"DefaultValue\", str(prm.default)))", "u in prm.uoms]) )) if is_input: elem.append(_encode_allowed_value(prm.allowed_values)) if prm.default is not None: elem.append(NIL(\"DefaultValue\",", "OWS(\"AnyValue\") elif isinstance(avobj, AllowedByReference): return WPS(\"ValuesReference\", **{ ns_ows(\"reference\"): avobj.url, \"valuesForm\": avobj.url, }) elif", "associated documentation files (the \"Software\"), to deal # in the Software without restriction,", "return WPS(\"Output\", *_encode_param_common(outdef, False), **attrib) def _encode_param_common(prm, title_required=True): \"\"\" Encode common sub-elements of", "ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\", *elms, **attr)) return OWS(\"AllowedValues\", *elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\"", "hereby granted, free of charge, to any person obtaining a copy # of", "of this software and associated documentation files (the \"Software\"), to deal # in", "included in all # copies of this Software or works derived from this", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "attrib['crs'] = outdef.crs if outdef.mime_type is not None: attrib['mimeType'] = outdef.mime_type if outdef.encoding", "value object! 
OBJ=%r\"%avobj) dtype = avobj.dtype ddtype = dtype.get_diff_dtype() if enum is not", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode common part of the execure response", "WPS, NIL, ns_ows, ) #------------------------------------------------------------------------------- def encode_input_descr(prm): \"\"\" Encode process description input.\"\"\" elem", "*elist) #------------------------------------------------------------------------------- def _encode_complex(prm, is_input): return NIL(\"ComplexData\" if is_input else \"ComplexOutput\", NIL(\"Default\", _encode_format(prm.default_format)),", "is not None: elms.append(OWS(\"MaximumValue\", dtype.encode(range_.maxval))) if range_.spacing is not None: elms.append(OWS(\"Spacing\", ddtype.encode(range_.spacing))) elist.append(OWS(\"Range\",", "= NIL(\"Output\", *_encode_param_common(prm)) if isinstance(prm, LiteralData): elem.append(_encode_literal(prm, False)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, False))", "is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values) for range_ in ranges:", "not None: attrib['uom'] = outdef.uom if outdef.crs is not None: attrib['crs'] = outdef.crs", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "OTHER DEALINGS IN # THE SOFTWARE. #------------------------------------------------------------------------------- from eoxserver.services.ows.wps.parameters import ( LiteralData, ComplexData,", "= dtype.get_diff_dtype() if enum is not None: elist.extend(OWS(\"Value\", dtype.encode(v)) for v in enum.values)", "the execure response data input.\"\"\" return WPS(\"Input\", *_encode_param_common(prm, False)) def encode_output_exec(prm): \"\"\" Encode", "NIL(\"Default\", OWS(\"UOM\", prm.uoms[0])), NIL(\"Supported\", *[OWS(\"UOM\", u) for u in prm.uoms]) )) if is_input:", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of", "isinstance(prm, LiteralData): elem.append(_encode_literal(prm, True)) elif isinstance(prm, ComplexData): elem.append(_encode_complex(prm, True)) elif isinstance(prm, BoundingBoxData): elem.append(_encode_bbox(prm,", "derived from this Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "subject to the following conditions: # # The above copyright notice and this" ]
[ "Error uploading ' + archive_file + ' to ' + bucket + '", "downloading ' + key + ' from ' + bucket + ' bucket.", "data object to archive. This value is required. ''' if bucket is None", "archive_key: The vp_save data's location (S3 bucket and file path). This value is", "None or len(bucket) <= 0: raise ValueError() if vp_save_pk is None or len(vp_save_pk)", "try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e: print('ERROR: Error uploading", "vp_save_pk + '.json' # Upload curation data to S3 archive bucket. s3_client =", "archive location. :param str bucket: The name of the S3 bucket for the", "as S3Client from decimal import Decimal def get_from_archive(archive_key): ''' Download a VP Save", "bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps)", "raise return archive_object def build(vp_save={}): ''' Builds and returns a valid vp_save object.", "import datetime import uuid import simplejson as json from src.db.s3_client import Client as", "' + bucket + ' bucket. ERROR\\n%s' %e) raise return archive_object def build(vp_save={}):", "for the archive. This value is required. :param str vp_save_pk: The vp_save PK", "save_data: raise ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json' #", "archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json' # Upload curation data", "archive. This value is required. ''' if bucket is None or len(bucket) <=", "save data. If the upload fails, an exception is raised. If successful, returns", "If successful, returns the archive location. :param str bucket: The name of the", "a JSON file to S3. The location of the archive depends on the", "ValueError() if vp_save_pk is None or len(vp_save_pk) <= 0: raise ValueError() if not", "Decimal def get_from_archive(archive_key): ''' Download a VP Save from S3. :param str archive_key:", "attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new data) now =", "bucket + ' bucket. ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): ''' Builds", "JSON file to S3. The location of the archive depends on the bucket", "<= 0: raise ValueError() if vp_save_pk is None or len(vp_save_pk) <= 0: raise", "or len(bucket) <= 0: raise ValueError() if vp_save_pk is None or len(vp_save_pk) <=", "given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new data) now", "required. :param str vp_save_pk: The vp_save PK to use as the name of", "now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data):", "vp_save_pk, save_data): ''' Archives a vp save data to S3. Uploads the save", "Archives a vp save data to S3. Uploads the save data object as", "key + ' from ' + bucket + ' bucket. ERROR\\n%s' %e) raise", "+ '/' + vp_save_pk + '.json' # Upload curation data to S3 archive", "or len(vp_save_pk) <= 0: raise ValueError() if not save_data: raise ValueError() archive_file =", "ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): ''' Builds and returns a valid", "= json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading ' + key", "new vp_save object by creating default values for required fields and combines any", "value is required. :param obj save_data: The save data object to archive. This", "The vp_save PK to use as the name of the JSON file. 
This", "+ vp_save_pk + '.json' # Upload curation data to S3 archive bucket. s3_client", "bucket, key = archive_key.split('/', 1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal)", "bucket. ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): ''' Builds and returns a", "to ' + bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket,", "vp_save_pk is None or len(vp_save_pk) <= 0: raise ValueError() if not save_data: raise", "save data object as a JSON file to S3. The location of the", "archive. This value is required. :param str vp_save_pk: The vp_save PK to use", "vp_save object by creating default values for required fields and combines any of", "of the archive depends on the bucket and the primary key of the", "''' if archive_key is None or '/' not in archive_key: raise ValueError() bucket,", "archive_key is None or '/' not in archive_key: raise ValueError() bucket, key =", "a VP Save from S3. :param str archive_key: The vp_save data's location (S3", "src.db.s3_client import Client as S3Client from decimal import Decimal def get_from_archive(archive_key): ''' Download", "This value is required. ''' if archive_key is None or '/' not in", "bucket is None or len(bucket) <= 0: raise ValueError() if vp_save_pk is None", "for required fields and combines any of the given attributes. ''' vp_save['PK'] =", "data's location (S3 bucket and file path). This value is required. ''' if", "is required. ''' if bucket is None or len(bucket) <= 0: raise ValueError()", "<= 0: raise ValueError() if not save_data: raise ValueError() archive_file = __archive_key(save_data) +", "+ ' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps) def", "if not save_data: raise ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk +", "s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e:", "' to ' + bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps =", "uuid import simplejson as json from src.db.s3_client import Client as S3Client from decimal", "Error downloading ' + key + ' from ' + bucket + '", "' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps) def __archive_key(save_data):", "a valid vp_save object. Builds a new vp_save object by creating default values", "to S3 archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file )", "uploading ' + archive_file + ' to ' + bucket + ' bucket.", "fields and combines any of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) #", "by creating default values for required fields and combines any of the given", "is required. :param obj save_data: The save data object to archive. This value", "PK to use as the name of the JSON file. This value is", "the save data. If the upload fails, an exception is raised. If successful,", "# Set timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified']", "str(uuid.uuid4()) # Set timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now", "not in archive_key: raise ValueError() bucket, key = archive_key.split('/', 1) s3_client = S3Client()", "build(vp_save={}): ''' Builds and returns a valid vp_save object. Builds a new vp_save", "bucket. 
ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps) def __archive_key(save_data): return", "of the save data. If the upload fails, an exception is raised. If", "get_from_archive(archive_key): ''' Download a VP Save from S3. :param str archive_key: The vp_save", "= archive_key.split('/', 1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception", "ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return '/'.join(archive_key_comps) def __archive_key(save_data): return save_data['PK']", "+ ' to ' + bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps", "json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading ' + key +", "is None or len(vp_save_pk) <= 0: raise ValueError() if not save_data: raise ValueError()", "and returns a valid vp_save object. Builds a new vp_save object by creating", "is required. :param str vp_save_pk: The vp_save PK to use as the name", "of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new", "key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading ' + key + '", "data to S3. Uploads the save data object as a JSON file to", "Client as S3Client from decimal import Decimal def get_from_archive(archive_key): ''' Download a VP", "str archive_key: The vp_save data's location (S3 bucket and file path). This value", ":param str vp_save_pk: The vp_save PK to use as the name of the", "now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a", "S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading", "location. :param str bucket: The name of the S3 bucket for the archive.", "file path). This value is required. ''' if archive_key is None or '/'", "object to archive. This value is required. ''' if bucket is None or", "or '/' not in archive_key: raise ValueError() bucket, key = archive_key.split('/', 1) s3_client", "location (S3 bucket and file path). This value is required. ''' if archive_key", "S3. The location of the archive depends on the bucket and the primary", "S3 archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except", "= __archive_key(save_data) + '/' + vp_save_pk + '.json' # Upload curation data to", "= now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk,", "vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): '''", "archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading ' +", "datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save def", "raised. If successful, returns the archive location. :param str bucket: The name of", "'/' not in archive_key: raise ValueError() bucket, key = archive_key.split('/', 1) s3_client =", "This value is required. 
:param obj save_data: The save data object to archive.", "= 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a vp save", "as a JSON file to S3. The location of the archive depends on", "Exception as e: print('ERROR: Error downloading ' + key + ' from '", "def get_from_archive(archive_key): ''' Download a VP Save from S3. :param str archive_key: The", ":param str bucket: The name of the S3 bucket for the archive. This", "ValueError() bucket, key = archive_key.split('/', 1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket,", "if vp_save_pk is None or len(vp_save_pk) <= 0: raise ValueError() if not save_data:", "not save_data: raise ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json'", "location of the archive depends on the bucket and the primary key of", "+ '.json' # Upload curation data to S3 archive bucket. s3_client = S3Client()", "from src.db.s3_client import Client as S3Client from decimal import Decimal def get_from_archive(archive_key): '''", "archive depends on the bucket and the primary key of the save data.", "= str(uuid.uuid4()) # Set timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] =", "bucket, archive_file ) except Exception as e: print('ERROR: Error uploading ' + archive_file", "= now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives", "' from ' + bucket + ' bucket. ERROR\\n%s' %e) raise return archive_object", "depends on the bucket and the primary key of the save data. If", "S3. :param str archive_key: The vp_save data's location (S3 bucket and file path).", "archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception", "in archive_key: raise ValueError() bucket, key = archive_key.split('/', 1) s3_client = S3Client() try:", "object as a JSON file to S3. The location of the archive depends", "object. Builds a new vp_save object by creating default values for required fields", "name of the JSON file. This value is required. :param obj save_data: The", "' + key + ' from ' + bucket + ' bucket. ERROR\\n%s'", ") except Exception as e: print('ERROR: Error uploading ' + archive_file + '", "the bucket and the primary key of the save data. If the upload", "# Upload curation data to S3 archive bucket. s3_client = S3Client() try: s3_client.put_object(", "a vp save data to S3. Uploads the save data object as a", "' bucket. ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): ''' Builds and returns", "object by creating default values for required fields and combines any of the", "(S3 bucket and file path). This value is required. ''' if archive_key is", "Download a VP Save from S3. :param str archive_key: The vp_save data's location", "bucket for the archive. This value is required. :param str vp_save_pk: The vp_save", "vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created']", "the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new data)", "e: print('ERROR: Error downloading ' + key + ' from ' + bucket", "and file path). This value is required. ''' if archive_key is None or", "len(vp_save_pk) <= 0: raise ValueError() if not save_data: raise ValueError() archive_file = __archive_key(save_data)", "+ archive_file + ' to ' + bucket + ' bucket. 
ERROR\\n%s' %e)", "is None or len(bucket) <= 0: raise ValueError() if vp_save_pk is None or", "the archive. This value is required. :param str vp_save_pk: The vp_save PK to", "bucket and the primary key of the save data. If the upload fails,", "vp save data to S3. Uploads the save data object as a JSON", "save data object to archive. This value is required. ''' if bucket is", "as e: print('ERROR: Error uploading ' + archive_file + ' to ' +", "ValueError() if not save_data: raise ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk", "Uploads the save data object as a JSON file to S3. The location", "required. :param obj save_data: The save data object to archive. This value is", "archive_key: raise ValueError() bucket, key = archive_key.split('/', 1) s3_client = S3Client() try: archive_object", "json from src.db.s3_client import Client as S3Client from decimal import Decimal def get_from_archive(archive_key):", "+ ' from ' + bucket + ' bucket. ERROR\\n%s' %e) raise return", "vp_save PK to use as the name of the JSON file. This value", "an exception is raised. If successful, returns the archive location. :param str bucket:", "'/' + vp_save_pk + '.json' # Upload curation data to S3 archive bucket.", "S3. Uploads the save data object as a JSON file to S3. The", "None or '/' not in archive_key: raise ValueError() bucket, key = archive_key.split('/', 1)", "print('ERROR: Error downloading ' + key + ' from ' + bucket +", "bucket and file path). This value is required. ''' if archive_key is None", "if archive_key is None or '/' not in archive_key: raise ValueError() bucket, key", "''' if bucket is None or len(bucket) <= 0: raise ValueError() if vp_save_pk", "exception is raised. If successful, returns the archive location. :param str bucket: The", "vp_save data's location (S3 bucket and file path). This value is required. '''", "combines any of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps", "file to S3. The location of the archive depends on the bucket and", "archive_file + ' to ' + bucket + ' bucket. ERROR\\n%s' %e) raise", "import simplejson as json from src.db.s3_client import Client as S3Client from decimal import", "raise ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json' # Upload", "This value is required. ''' if bucket is None or len(bucket) <= 0:", "archive_key.split('/', 1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as", "decimal import Decimal def get_from_archive(archive_key): ''' Download a VP Save from S3. :param", "+ bucket + ' bucket. ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): '''", "(for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type']", "bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as", "save_data): ''' Archives a vp save data to S3. Uploads the save data", "str vp_save_pk: The vp_save PK to use as the name of the JSON", "except Exception as e: print('ERROR: Error uploading ' + archive_file + ' to", "S3Client from decimal import Decimal def get_from_archive(archive_key): ''' Download a VP Save from", "raise ValueError() if not save_data: raise ValueError() archive_file = __archive_key(save_data) + '/' +", "+ ' bucket. 
ERROR\\n%s' %e) raise return archive_object def build(vp_save={}): ''' Builds and", "creating default values for required fields and combines any of the given attributes.", "archive_object def build(vp_save={}): ''' Builds and returns a valid vp_save object. Builds a", "save_data: The save data object to archive. This value is required. ''' if", "print('ERROR: Error uploading ' + archive_file + ' to ' + bucket +", "data to S3 archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file", "bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e: print('ERROR: Error uploading ' +", "ValueError() archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json' # Upload curation", "''' Download a VP Save from S3. :param str archive_key: The vp_save data's", "of the S3 bucket for the archive. This value is required. :param str", "and combines any of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set", "'.json' # Upload curation data to S3 archive bucket. s3_client = S3Client() try:", "name of the S3 bucket for the archive. This value is required. :param", "any of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for", "value is required. :param str vp_save_pk: The vp_save PK to use as the", "curation data to S3 archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket,", "raise ValueError() bucket, key = archive_key.split('/', 1) s3_client = S3Client() try: archive_object =", "valid vp_save object. Builds a new vp_save object by creating default values for", "vp_save object. Builds a new vp_save object by creating default values for required", "as json from src.db.s3_client import Client as S3Client from decimal import Decimal def", "This value is required. :param str vp_save_pk: The vp_save PK to use as", "VP Save from S3. :param str archive_key: The vp_save data's location (S3 bucket", "from decimal import Decimal def get_from_archive(archive_key): ''' Download a VP Save from S3.", "str bucket: The name of the S3 bucket for the archive. This value", "1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e:", "is raised. If successful, returns the archive location. :param str bucket: The name", "timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now", "required. ''' if archive_key is None or '/' not in archive_key: raise ValueError()", "= S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error", "default values for required fields and combines any of the given attributes. '''", "successful, returns the archive location. :param str bucket: The name of the S3", "values for required fields and combines any of the given attributes. ''' vp_save['PK']", "from ' + bucket + ' bucket. ERROR\\n%s' %e) raise return archive_object def", "e: print('ERROR: Error uploading ' + archive_file + ' to ' + bucket", "s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e: print('ERROR: Error uploading '", "bucket: The name of the S3 bucket for the archive. 
This value is", "return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a vp save data to", "import uuid import simplejson as json from src.db.s3_client import Client as S3Client from", "use as the name of the JSON file. This value is required. :param", ":param obj save_data: The save data object to archive. This value is required.", "Upload curation data to S3 archive bucket. s3_client = S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')),", "The save data object to archive. This value is required. ''' if bucket", "Save from S3. :param str archive_key: The vp_save data's location (S3 bucket and", "''' vp_save['PK'] = str(uuid.uuid4()) # Set timestamps (for new data) now = datetime.datetime.now().isoformat()", "returns the archive location. :param str bucket: The name of the S3 bucket", "the archive depends on the bucket and the primary key of the save", "= S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e: print('ERROR:", "as e: print('ERROR: Error downloading ' + key + ' from ' +", "+ bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file] return", "to archive. This value is required. ''' if bucket is None or len(bucket)", "__archive_key(save_data) + '/' + vp_save_pk + '.json' # Upload curation data to S3", "return archive_object def build(vp_save={}): ''' Builds and returns a valid vp_save object. Builds", "import Client as S3Client from decimal import Decimal def get_from_archive(archive_key): ''' Download a", "The location of the archive depends on the bucket and the primary key", "vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a vp save data to S3.", "value is required. ''' if bucket is None or len(bucket) <= 0: raise", "a new vp_save object by creating default values for required fields and combines", "Exception as e: print('ERROR: Error uploading ' + archive_file + ' to '", "S3 bucket for the archive. This value is required. :param str vp_save_pk: The", "new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] =", "+ key + ' from ' + bucket + ' bucket. ERROR\\n%s' %e)", "now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return", "None or len(vp_save_pk) <= 0: raise ValueError() if not save_data: raise ValueError() archive_file", "of the JSON file. This value is required. :param obj save_data: The save", "vp_save_pk: The vp_save PK to use as the name of the JSON file.", "is required. ''' if archive_key is None or '/' not in archive_key: raise", "= datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save", "Set timestamps (for new data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] =", "the upload fails, an exception is raised. If successful, returns the archive location.", "the S3 bucket for the archive. This value is required. :param str vp_save_pk:", "key of the save data. If the upload fails, an exception is raised.", "as the name of the JSON file. This value is required. :param obj", ":param str archive_key: The vp_save data's location (S3 bucket and file path). This", "value is required. ''' if archive_key is None or '/' not in archive_key:", "returns a valid vp_save object. 
Builds a new vp_save object by creating default", "If the upload fails, an exception is raised. If successful, returns the archive", "vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a vp", "import Decimal def get_from_archive(archive_key): ''' Download a VP Save from S3. :param str", "Builds and returns a valid vp_save object. Builds a new vp_save object by", "and the primary key of the save data. If the upload fails, an", "raise ValueError() if vp_save_pk is None or len(vp_save_pk) <= 0: raise ValueError() if", "JSON file. This value is required. :param obj save_data: The save data object", "''' Archives a vp save data to S3. Uploads the save data object", "primary key of the save data. If the upload fails, an exception is", "vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save' return vp_save def archive(bucket,", "' + bucket + ' bucket. ERROR\\n%s' %e) raise archive_key_comps = [bucket, archive_file]", "try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR: Error downloading '", "obj save_data: The save data object to archive. This value is required. '''", "def build(vp_save={}): ''' Builds and returns a valid vp_save object. Builds a new", "required fields and combines any of the given attributes. ''' vp_save['PK'] = str(uuid.uuid4())", "archive(bucket, vp_save_pk, save_data): ''' Archives a vp save data to S3. Uploads the", "path). This value is required. ''' if archive_key is None or '/' not", "S3Client() try: s3_client.put_object( bytes(json.dumps(save_data).encode('UTF-8')), bucket, archive_file ) except Exception as e: print('ERROR: Error", "the JSON file. This value is required. :param obj save_data: The save data", "fails, an exception is raised. If successful, returns the archive location. :param str", "simplejson as json from src.db.s3_client import Client as S3Client from decimal import Decimal", "is None or '/' not in archive_key: raise ValueError() bucket, key = archive_key.split('/',", "0: raise ValueError() if not save_data: raise ValueError() archive_file = __archive_key(save_data) + '/'", "key = archive_key.split('/', 1) s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except", "len(bucket) <= 0: raise ValueError() if vp_save_pk is None or len(vp_save_pk) <= 0:", "from S3. :param str archive_key: The vp_save data's location (S3 bucket and file", "The vp_save data's location (S3 bucket and file path). This value is required.", "to S3. Uploads the save data object as a JSON file to S3.", "the archive location. :param str bucket: The name of the S3 bucket for", "Builds a new vp_save object by creating default values for required fields and", "data. If the upload fails, an exception is raised. If successful, returns the", "0: raise ValueError() if vp_save_pk is None or len(vp_save_pk) <= 0: raise ValueError()", "datetime import uuid import simplejson as json from src.db.s3_client import Client as S3Client", "'vp_save' return vp_save def archive(bucket, vp_save_pk, save_data): ''' Archives a vp save data", "file. This value is required. :param obj save_data: The save data object to", "%e) raise return archive_object def build(vp_save={}): ''' Builds and returns a valid vp_save", "The name of the S3 bucket for the archive. This value is required.", "to use as the name of the JSON file. This value is required.", "required. 
''' if bucket is None or len(bucket) <= 0: raise ValueError() if", "data) now = datetime.datetime.now().isoformat() vp_save['date_created'] = now vp_save['last_modified'] = now vp_save['item_type'] = 'vp_save'", "to S3. The location of the archive depends on the bucket and the", "''' Builds and returns a valid vp_save object. Builds a new vp_save object", "except Exception as e: print('ERROR: Error downloading ' + key + ' from", "the save data object as a JSON file to S3. The location of", "archive_file ) except Exception as e: print('ERROR: Error uploading ' + archive_file +", "def archive(bucket, vp_save_pk, save_data): ''' Archives a vp save data to S3. Uploads", "on the bucket and the primary key of the save data. If the", "data object as a JSON file to S3. The location of the archive", "upload fails, an exception is raised. If successful, returns the archive location. :param", "s3_client = S3Client() try: archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal) except Exception as e: print('ERROR:", "' + archive_file + ' to ' + bucket + ' bucket. ERROR\\n%s'", "the name of the JSON file. This value is required. :param obj save_data:", "save data to S3. Uploads the save data object as a JSON file", "the primary key of the save data. If the upload fails, an exception", "if bucket is None or len(bucket) <= 0: raise ValueError() if vp_save_pk is" ]
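For context, a minimal round-trip sketch of the three helpers above. The bucket name and the payload are illustrative assumptions, and it presumes src.db.s3_client.Client is configured with working S3 credentials; it is not part of the module itself.

# Hypothetical usage, assuming a bucket named 'vp-save-archive' exists
# and the S3 client can read/write it.
save_data = build({'notes': 'example payload'})          # fills PK, timestamps, item_type
archive_key = archive('vp-save-archive', save_data['PK'], save_data)
restored = get_from_archive(archive_key)                 # downloads '<bucket>/<PK>/<PK>.json'
assert restored['PK'] == save_data['PK']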
[ "plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x", "= brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0,", "plt from brokenaxes import brokenaxes import numpy as np fig = plt.figure(figsize=(5,2)) bax", "import matplotlib.pyplot as plt from brokenaxes import brokenaxes import numpy as np fig", "basic usage of brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes import brokenaxes", "usage of brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes import brokenaxes import", "Basic usage =========== This example presents the basic usage of brokenaxes \"\"\" import", "brokenaxes import numpy as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4,", "of brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes import brokenaxes import numpy", "1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x,", "matplotlib.pyplot as plt from brokenaxes import brokenaxes import numpy as np fig =", "as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7),", "bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x =", "1, 100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x), label='cos') bax.legend(loc=3)", "the basic usage of brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes import", "\"\"\" import matplotlib.pyplot as plt from brokenaxes import brokenaxes import numpy as np", ".7), (.79, 1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x),", "hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10", "= np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x),", "(.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x,", "import numpy as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)),", "as plt from brokenaxes import brokenaxes import numpy as np fig = plt.figure(figsize=(5,2))", "= plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05)", "ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 *", "import brokenaxes import numpy as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1),", "fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)),", "100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x), label='cos') bax.legend(loc=3) bax.set_xlabel('time')", "This example presents the basic usage of brokenaxes \"\"\" import matplotlib.pyplot as plt", ".1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0, 1, 100)", "brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes import brokenaxes import numpy as", "numpy as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1,", "example presents the basic usage of brokenaxes \"\"\" import matplotlib.pyplot as plt from", "brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0, 1,", "np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x), label='cos')", "presents the basic usage of brokenaxes \"\"\" import matplotlib.pyplot as plt from brokenaxes", "x = np.linspace(0, 1, 100) bax.plot(x, 
np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 *", "usage =========== This example presents the basic usage of brokenaxes \"\"\" import matplotlib.pyplot", "=========== This example presents the basic usage of brokenaxes \"\"\" import matplotlib.pyplot as", "bax.plot(x, np.sin(10 * x), label='sin') bax.plot(x, np.cos(10 * x), label='cos') bax.legend(loc=3) bax.set_xlabel('time') bax.set_ylabel('value')", "from brokenaxes import brokenaxes import numpy as np fig = plt.figure(figsize=(5,2)) bax =", "brokenaxes import brokenaxes import numpy as np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0,", "(.79, 1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10 * x), label='sin')", ".7)), ylims=((-1, .7), (.79, 1)), hspace=.05) x = np.linspace(0, 1, 100) bax.plot(x, np.sin(10", "np fig = plt.figure(figsize=(5,2)) bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79,", "\"\"\" Basic usage =========== This example presents the basic usage of brokenaxes \"\"\"" ]
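The example builds the plot but stops before rendering it; a small follow-up under the assumption of a writable working directory and an interactive backend (the file name is illustrative):

# Persist and/or display the broken-axes figure.
fig.savefig('basic_usage.png', dpi=150, bbox_inches='tight')
plt.show()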
[ "suffix = tmp[1] ln = suffix ln_org = \"\" if ln in ln_map:", "wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if os.path.exists(db_path): with open(db_path)", "{} if os.path.exists(db_path): with open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path): with", "RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in", "label if \"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label,", "prefix = tmp[0] suffix = tmp[1] ln = suffix ln_org = \"\" if", "as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db:", "URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif", "places: places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' #", "# 位置情報 ''' if \"point\" in obj and prefix == \"place\": value =", "URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\" in obj and prefix == \"place\":", "[] for obj in result: fields = [\"spatial\", \"agential\"] for field in fields:", "if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"]", "in ln_map: ln_org = ln ln = ln_map[ln] if len(ln) > 20: continue", "elif prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix ==", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"),", "= Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f) st_path = \"../data/index.json\" with", "RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\"", "BNode, Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib import", "in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\"", "label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj:", "in uris: uris.append(uri) for uri in uris: print(uri) tmp = uri.split(\":\") prefix =", "ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path) as f: result = json.load(f)", "all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt", "open(wiki_path) as f: wiki = json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url))", "in values: uri = \"chname:\"+value if field == \"spatial\": uri = \"place:\"+value if", "Namespace all = Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f) st_path =", "= (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"]", "= (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" 
all.serialize(destination=path, format='json-ld') all.serialize(destination=path.replace(\".json\", \".rdf\"), format='pretty-xml')", "ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path, format='json-ld') all.serialize(destination=path.replace(\".json\",", "prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\":", "description if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"),", "obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"] if \"http://dbpedia.org\"", "import urllib import ssl import csv import time import requests import json import", "(subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj and \"ja\"", "yaml import sys import urllib import ssl import csv import time import requests", "csv from rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace import RDF, RDFS,", "if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in", "= {} wiki = {} if os.path.exists(db_path): with open(db_path) as f: db =", "stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\"", "in labels: if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if", "obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"] if \"http://dbpedia.org\" in value or", "= [] for obj in result: fields = [\"spatial\", \"agential\"] for field in", "== \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt", "\"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt =", "elif prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj", "obj[field] for value in values: uri = \"chname:\"+value if field == \"spatial\": uri", "\"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt)", "== \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels", "} all.add(stmt) ''' # 正規化前 if ln_org != \"\" and ln != ln_org:", "if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt", "\"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix", "= json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) db_uri =", "value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"),", "db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" , db_uri) continue #", "XSD from rdflib import Namespace all = Graph() with open(\"data/dict.json\") as f: ln_map", "suffix ln_org = \"\" if ln in ln_map: ln_org 
= ln ln =", "= \"chname:\"+value if field == \"spatial\": uri = \"place:\"+value if uri not in", "URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in", "= uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln = suffix ln_org =", "= obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1])", "stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"]", "if ln in ln_map: ln_org = ln ln = ln_map[ln] if len(ln) >", "or \"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 '''", "in places: places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) '''", "ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {}", "stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj", ": float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not", "import Namespace all = Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f) st_path", "== \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\" :", "or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value))", "(subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"]))", "ln_map: ln_org = ln ln = ln_map[ln] if len(ln) > 20: continue #", "tmp = uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln = suffix ln_org", "labels: value = label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or", "{} wiki = {} if os.path.exists(db_path): with open(db_path) as f: db = json.load(f)", "# description if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject,", "= json.load(f) st_path = \"../data/index.json\" with open(st_path) as f: result = json.load(f) uris", "# label if \"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt = (subject,", "\"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places:", "URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix] = { \"lat\" : float(value[0]),", "f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\"", "shutil import os import json import glob import yaml import sys import urllib", "time import requests import json import csv from rdflib import URIRef, BNode, Literal,", "if db_uri not in db: print(\"not\" , db_uri) continue # ###### subject =", "== \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt", 
"open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt)", "import RDF, RDFS, FOAF, XSD from rdflib import Namespace all = Graph() with", "json import csv from rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace import", "f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f)", "import json import csv from rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt)", "with open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f:", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt = (subject,", "[\"spatial\", \"agential\"] for field in fields: values = obj[field] for value in values:", "(subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path =", "stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\" in obj", "\"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt =", "in labels: value = label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt = (subject, RDF.type,", "''' # 正規化前 if ln_org != \"\" and ln != ln_org: stmt =", "ln = suffix ln_org = \"\" if ln in ln_map: ln_org = ln", "\"place:\"+value if uri not in uris: uris.append(uri) for uri in uris: print(uri) tmp", "if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if", "len(ln) > 20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki", "(subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt = (subject,", "lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt", "json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln", "uris: uris.append(uri) for uri in uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0]", "not in uris: uris.append(uri) for uri in uris: print(uri) tmp = uri.split(\":\") prefix", "{ \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt = (subject,", "result = json.load(f) uris = [] for obj in result: fields = [\"spatial\",", "float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in", "obj[\"labels\"]: 
stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path =", "uri = \"chname:\"+value if field == \"spatial\": uri = \"place:\"+value if uri not", "value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\":", "正規化前 if ln_org != \"\" and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"),", "= \"../data/index.json\" with open(st_path) as f: result = json.load(f) uris = [] for", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt = (subject, RDF.type,", "obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as", "\"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt", "\"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj:", "''' if \"point\" in obj and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \")", "json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] #", "with open(wiki_path) as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not", "= \"\" if ln in ln_map: ln_org = ln ln = ln_map[ln] if", "\"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]),", "URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and \"ja\"", "wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki", "obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels =", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt)", "obj and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln =", "for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\"", "all.add(stmt) # 位置情報 ''' if \"point\" in obj and prefix == \"place\": value", "= addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri)", "= tmp[1] ln = suffix ln_org = \"\" if ln in ln_map: ln_org", "stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\"", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri))", "in value or \"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) #", "= 
obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path)", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt = (subject,", "in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"] if", "if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"],", "addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if", "rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD", "= (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for", "all.add(stmt) elif prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ######", "uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln = suffix ln_org = \"\"", "in value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt = (subject,", "and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1]", "in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path", "continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject,", "= obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt)", "elif prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix ==", "for field in fields: values = obj[field] for value in values: uri =", "if len(ln) > 20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\"", "ln = ln_map[ln] if len(ln) > 20: continue # ln = obj[\"uri\"].split(\":\")[1] '''", "obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"],", "= (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix] = { \"lat\"", "= tmp[0] suffix = tmp[1] ln = suffix ln_org = \"\" if ln", "ln_org != \"\" and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt)", "= URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif", "in uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln", "and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\"", "\"../data/index.json\" with open(st_path) as f: result = json.load(f) uris = [] for obj", "import os import json import glob import yaml import sys import urllib import", "uris = [] for obj in result: fields = [\"spatial\", \"agential\"] for field", "with open(\"data/dict.json\") as f: 
ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path) as", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt = (subject, RDF.type,", "FOAF, XSD from rdflib import Namespace all = Graph() with open(\"data/dict.json\") as f:", "print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln = suffix", "fields: values = obj[field] for value in values: uri = \"chname:\"+value if field", "\"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if os.path.exists(db_path): with", "= (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path", "import yaml import sys import urllib import ssl import csv import time import", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt)", "stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix] = {", "addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt = (subject,", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt = (subject,", "all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value", "}) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix] =", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\"))", "import glob import yaml import sys import urllib import ssl import csv import", "db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt =", "\"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\"))", "uri in uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix = tmp[1]", "\"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" , db_uri) continue # ###### subject", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\"))", "all.add(stmt) elif prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix", "if os.path.exists(db_path): with open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path)", "URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label", "os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) # sameAs stmt = (subject,", "== \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt", "value = label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or 
\"www.wikidata.org\"", "= json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" , db_uri)", "if \"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"]))", "obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] == \"ja\": stmt", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt = (subject,", "(subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label", "rdflib import Namespace all = Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f)", "ln_org = ln ln = ln_map[ln] if len(ln) > 20: continue # ln", "Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db", "ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path,", "labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels", "obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj:", "\"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if", "= [\"spatial\", \"agential\"] for field in fields: values = obj[field] for value in", "stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path, format='json-ld') all.serialize(destination=path.replace(\".json\", \".rdf\"),", "requests import json import csv from rdflib import URIRef, BNode, Literal, Graph from", "not in db: print(\"not\" , db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if", "in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj:", "RDF, RDFS, FOAF, XSD from rdflib import Namespace all = Graph() with open(\"data/dict.json\")", "\"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln = wiki_url.split(\"/\")[-1] '''", "all.add(stmt) # ###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if", "prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\":", "st_path = \"../data/index.json\" with open(st_path) as f: result = json.load(f) uris = []", "\"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f)", "if suffix not in places: places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1])", "for obj in result: fields = [\"spatial\", \"agential\"] for field in fields: values", "stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: 
stmt = (subject,", "\"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt =", "uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix = tmp[1] ln =", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt = (subject, RDF.type,", "import URIRef, BNode, Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD from", "= db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt", "== \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt", "labels: if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\"", "label in labels: value = label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in", "from rdflib import Namespace all = Graph() with open(\"data/dict.json\") as f: ln_map =", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt)", "float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix]", "in obj and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri", "labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"] if \"http://dbpedia.org\" in", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt =", "URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\"))", "import time import requests import json import csv from rdflib import URIRef, BNode,", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\"))", "obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in", "# addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt =", "if uri not in uris: uris.append(uri) for uri in uris: print(uri) tmp =", "db = {} wiki = {} if os.path.exists(db_path): with open(db_path) as f: db", "not in places: places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt)", "tmp[1] ln = suffix ln_org = \"\" if ln in ln_map: ln_org =", "20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {}", "(subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\" in obj and prefix", "# sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description", "= 
obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] == \"ja\": stmt = (subject,", "!= ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path = \"data/all.json\" all.serialize(destination=path, format='json-ld')", "labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] == \"ja\": stmt =", "db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) db_uri", "elif prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix ==", "values: uri = \"chname:\"+value if field == \"spatial\": uri = \"place:\"+value if uri", "in obj and \"ja\" in obj[\"labels\"]: stmt = (subject, RDFS.label, Literal(obj[\"labels\"][\"ja\"][\"value\"])) all.add(stmt) ln", "uris.append(uri) for uri in uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix", "ln in ln_map: ln_org = ln ln = ln_map[ln] if len(ln) > 20:", "\"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt =", "if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt =", "= ln ln = ln_map[ln] if len(ln) > 20: continue # ln =", "= {} if os.path.exists(db_path): with open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path):", "for uri in uris: print(uri) tmp = uri.split(\":\") prefix = tmp[0] suffix =", "URIRef, BNode, Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib", "ssl import csv import time import requests import json import csv from rdflib", "float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org != \"\" and ln !=", "import csv import time import requests import json import csv from rdflib import", "== \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri]", "elif prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix ==", "labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"]))", "Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in", "all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\"", "label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value:", "stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt = (subject,", "all.add(stmt) # label if \"labels\" in obj and \"ja\" in obj[\"labels\"]: stmt =", "ln ln = ln_map[ln] if len(ln) > 20: continue # ln = obj[\"uri\"].split(\":\")[1]", ", db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt", "obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]:", "\"point\" in obj 
and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数", "label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in", "if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) # sameAs stmt =", "URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix", "as f: result = json.load(f) uris = [] for obj in result: fields", "import json import glob import yaml import sys import urllib import ssl import", "= (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for", "Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path)", "import shutil import os import json import glob import yaml import sys import", "obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) #", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\"))", "all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and \"ja\" in", "os.path.exists(db_path): with open(db_path) as f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as", "= wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki", "###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in", "RDFS, FOAF, XSD from rdflib import Namespace all = Graph() with open(\"data/dict.json\") as", "f: ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path) as f: result =", "all.add(stmt) elif prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix", "all.add(stmt) ''' # 正規化前 if ln_org != \"\" and ln != ln_org: stmt", "json.load(f) uris = [] for obj in result: fields = [\"spatial\", \"agential\"] for", "= \"place:\"+value if uri not in uris: uris.append(uri) for uri in uris: print(uri)", "value or \"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報", "# 正規化前 if ln_org != \"\" and ln != ln_org: stmt = (subject,", "fields = [\"spatial\", \"agential\"] for field in fields: values = obj[field] for value", "\"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org !=", "= obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"] if \"http://dbpedia.org\" in value", "\"\" if ln in ln_map: ln_org = ln ln = ln_map[ln] if len(ln)", "import csv from rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace import RDF,", "wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki =", "in obj: labels = 
obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] == \"ja\":", "in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"),", "print(\"not\" , db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\":", "URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif", "\") # addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt", "(subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label", "if \"point\" in obj and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") #", "\"spatial\": uri = \"place:\"+value if uri not in uris: uris.append(uri) for uri in", "\"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if", "###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\"))", "places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前", "obj in result: fields = [\"spatial\", \"agential\"] for field in fields: values =", "db: print(\"not\" , db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix ==", "float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org != \"\" and", "value in values: uri = \"chname:\"+value if field == \"spatial\": uri = \"place:\"+value", "db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt =", "prefix == \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\":", "= ln_map[ln] if len(ln) > 20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path", "{} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) # sameAs stmt", "json.load(f) st_path = \"../data/index.json\" with open(st_path) as f: result = json.load(f) uris =", "with open(st_path) as f: result = json.load(f) uris = [] for obj in", "\"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels =", "RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt)", "# ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path):", "import requests import json import csv from rdflib import URIRef, BNode, Literal, Graph", "Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib import Namespace", "as f: wiki = json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt)", "if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = 
(subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in", "\"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] ==", "= suffix ln_org = \"\" if ln in ln_map: ln_org = ln ln", "result: fields = [\"spatial\", \"agential\"] for field in fields: values = obj[field] for", "as f: db = json.load(f) if os.path.exists(wiki_path): with open(wiki_path) as f: wiki =", "\"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt)", "ln_map[ln] if len(ln) > 20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path =", "URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif", "prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj =", "if ln_org != \"\" and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org))", "(subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for", "all = Graph() with open(\"data/dict.json\") as f: ln_map = json.load(f) st_path = \"../data/index.json\"", "= json.load(f) uris = [] for obj in result: fields = [\"spatial\", \"agential\"]", "in obj and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt)", "= (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in", "in fields: values = obj[field] for value in values: uri = \"chname:\"+value if", "from rdflib import URIRef, BNode, Literal, Graph from rdflib.namespace import RDF, RDFS, FOAF,", "wiki = {} if os.path.exists(db_path): with open(db_path) as f: db = json.load(f) if", "URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif", "open(wiki_path) as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in", "db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if", "\"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in value: stmt =", "URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt)", "stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels =", "if field == \"spatial\": uri = \"place:\"+value if uri not in uris: uris.append(uri)", "uri not in uris: uris.append(uri) for uri in uris: print(uri) tmp = uri.split(\":\")", "''' wiki_path = 
\"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as f:", "Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj and \"ja\" in obj[\"labels\"]:", "obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) })", "uri = \"place:\"+value if uri not in uris: uris.append(uri) for uri in uris:", "elif prefix == \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix ==", "''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki = {}", "\"\" and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path =", "\"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix == \"org\": stmt =", "== \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt", "as f: ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path) as f: result", "wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) #", "and \"ja\" in obj[\"descriptions\"]: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label", "!= \"\" and ln != ln_org: stmt = (subject, URIRef(\"http://schema.org/name\"), Literal(ln_org)) all.add(stmt) path", "# ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject, RDF.type,", "glob import yaml import sys import urllib import ssl import csv import time", "= \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" , db_uri) continue # ######", "= {} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) # sameAs", "subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt)", "= (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj and", "URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif", "f: wiki = json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj", "for label in labels: if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"]))", "obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label,", "all.add(stmt) elif prefix == \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\")) all.add(stmt) elif prefix", "prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({ \"lat\"", "geoUri = addGeo({ \"lat\" : float(value[0]), \"long\": float(value[1]) }) stmt = (subject, URIRef(\"http://schema.org/geo\"),", "geoUri) if suffix not in places: places[suffix] = { \"lat\" : float(value[0]), \"long\":", "in result: fields = [\"spatial\", \"agential\"] for field in fields: values = obj[field]", "in obj[\"descriptions\"]: 
stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\"", "field == \"spatial\": uri = \"place:\"+value if uri not in uris: uris.append(uri) for", "URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix == \"type\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) #", "prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\":", "open(\"data/dict.json\") as f: ln_map = json.load(f) st_path = \"../data/index.json\" with open(st_path) as f:", "if prefix == \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix ==", "all.add(stmt) elif prefix == \"keyword\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Keyword\")) all.add(stmt) elif prefix", "for label in labels: value = label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\"", "URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if", "if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value =", "obj and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri =", "label in labels: if label[\"lang\"] == \"ja\": stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt)", "import sys import urllib import ssl import csv import time import requests import", "ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with", "lang=label[\"lang\"])) all.add(stmt) if \"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels:", "open(st_path) as f: result = json.load(f) uris = [] for obj in result:", "f: result = json.load(f) uris = [] for obj in result: fields =", "\"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject,", "urllib import ssl import csv import time import requests import json import csv", "wiki = json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj =", "rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib import Namespace all = Graph()", "\"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if os.path.exists(db_path): with open(db_path) as f:", "= (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\" in obj: stmt = (subject, URIRef(\"http://schema.org/image\"),", "from rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib import Namespace all =", "= \"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if os.path.exists(db_path): with open(db_path) as", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt = (subject, RDF.type,", "value: stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\" in", "= (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 
位置情報 ''' if \"point\" in obj and", "csv import time import requests import json import csv from rdflib import URIRef,", "with open(wiki_path) as f: wiki = json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"),", "sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if", "suffix not in places: places[suffix] = { \"lat\" : float(value[0]), \"long\": float(value[1]) }", "= { \"lat\" : float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if", "位置情報 ''' if \"point\" in obj and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\"", "in obj: stmt = (subject, URIRef(\"http://schema.org/image\"), URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels", "\"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt =", "values = obj[field] for value in values: uri = \"chname:\"+value if field ==", "= \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db = {} wiki = {} if os.path.exists(db_path):", "for value in values: uri = \"chname:\"+value if field == \"spatial\": uri =", "db_uri not in db: print(\"not\" , db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln)", "== \"chname\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Agent\")) all.add(stmt) elif prefix == \"time\": stmt", "and prefix == \"place\": value = obj[\"point\"][\"value\"].split(\" \") # addGeo関数 geoUri = addGeo({", "sys import urllib import ssl import csv import time import requests import json", "(subject, URIRef(\"http://schema.org/geo\"), geoUri) if suffix not in places: places[suffix] = { \"lat\" :", "= wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt", "in db: print(\"not\" , db_uri) continue # ###### subject = URIRef(\"https://shibusawa-dlab.github.io/lab1/api/\"+prefix+\"/\"+ln) if prefix", "\"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org != \"\" and ln", "\"chname:\"+value if field == \"spatial\": uri = \"place:\"+value if uri not in uris:", "stmt = (subject, RDFS.label, Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"]", "URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(value)) all.add(stmt) # 位置情報 ''' if \"point\" in obj and prefix ==", "continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki = {} if", "== \"spatial\": uri = \"place:\"+value if uri not in uris: uris.append(uri) for uri", "all.add(stmt) elif prefix == \"time\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Time\")) all.add(stmt) elif prefix", "prefix == \"org\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\":", "> 20: continue # ln = obj[\"uri\"].split(\":\")[1] ''' wiki_path = \"data/wikidata/\"+ln+\".json\" wiki =", "Graph from rdflib.namespace import RDF, RDFS, FOAF, XSD from rdflib import Namespace all", "= (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Organization\")) all.add(stmt) elif prefix == \"keyword\": stmt = (subject, RDF.type,", 
"json import glob import yaml import sys import urllib import ssl import csv", "json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" , db_uri) continue", "wiki[\"entities\"][wiki_url.split(\"/\")[-1]] # description if \"descriptions\" in obj and \"ja\" in obj[\"descriptions\"]: stmt =", "tmp[0] suffix = tmp[1] ln = suffix ln_org = \"\" if ln in", "\"http://www.w3.org/2002/07/owl#sameAs\" in obj: labels = obj[\"http://www.w3.org/2002/07/owl#sameAs\"] for label in labels: value = label[\"value\"]", "os.path.exists(wiki_path): with open(wiki_path) as f: wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri", "os import json import glob import yaml import sys import urllib import ssl", "# ###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt) if \"http://dbpedia.org/ontology/thumbnail\"", "\"agential\"] for field in fields: values = obj[field] for value in values: uri", "obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels: stmt = (subject, URIRef(\"http://schema.org/description\"), Literal(label[\"value\"], lang=label[\"lang\"])) all.add(stmt) if", "(subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Event\"))", "= label[\"value\"] if \"http://dbpedia.org\" in value or \"http://ja.dbpedia.org\" in value or \"www.wikidata.org\" in", "field in fields: values = obj[field] for value in values: uri = \"chname:\"+value", "Literal(label[\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#comment\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#comment\"] for label in labels:", ": float(value[0]), \"long\": float(value[1]) } all.add(stmt) ''' # 正規化前 if ln_org != \"\"", "all.add(stmt) ln = wiki_url.split(\"/\")[-1] ''' db_path = \"data/dbpedia_ja/\"+ln+\".json\" wiki_path = \"data/wikidata/\"+ln+\".json\" db =", "= json.load(f) # sameAs stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(wiki_url)) all.add(stmt) obj = wiki[\"entities\"][wiki_url.split(\"/\")[-1]]", "prefix == \"place\": stmt = (subject, RDF.type, URIRef(\"https://jpsearch.go.jp/term/type/Place\")) all.add(stmt) elif prefix == \"event\":", "URIRef(\"https://jpsearch.go.jp/term/type/Type\")) all.add(stmt) # ###### obj = db[db_uri] stmt = (subject, URIRef(\"http://www.w3.org/2002/07/owl#sameAs\"), URIRef(db_uri)) all.add(stmt)", "= obj[field] for value in values: uri = \"chname:\"+value if field == \"spatial\":", "URIRef(\"http://schema.org/description\"), Literal(obj[\"descriptions\"][\"ja\"][\"value\"], lang=\"ja\")) all.add(stmt) # label if \"labels\" in obj and \"ja\" in", "ln_org = \"\" if ln in ln_map: ln_org = ln ln = ln_map[ln]", "URIRef(obj[\"http://dbpedia.org/ontology/thumbnail\"][0][\"value\"])) all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels:", "wiki = json.load(f) db_uri = \"http://ja.dbpedia.org/resource/\"+ln if db_uri not in db: print(\"not\" ,", "import ssl import csv import time import requests import json import csv from", "= \"data/wikidata/\"+ln+\".json\" wiki = {} if os.path.exists(wiki_path): with open(wiki_path) as f: wiki =", "all.add(stmt) if \"http://www.w3.org/2000/01/rdf-schema#label\" in obj: labels = obj[\"http://www.w3.org/2000/01/rdf-schema#label\"] for label in labels: 
if" ]
[ "configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print')", "bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i] for i in", "@patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we", "} open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i:", "[0, 1, 2]) all_configs = {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4,", "@patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if", "print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we can perform a whole", "= [{'id': i} for i in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for", "import * import bootloader_read_config from commands import * import sys import json class", "[None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for i in", "= \"test.py -p /dev/ttyUSB0 --all\".split() # The first two board answers the ping", "sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn,", "open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs =", "open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i]", "range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i in range(3) } open_conn.return_value =", "from msgpack import * import bootloader_read_config from commands import * import sys import", "object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i] for i", "for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def", "write_command_retry.return_value = { i: packb(configs[i]) for i in range(3) } open_conn.return_value = object()", "{ i: packb(configs[i]) for i in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value,", "discovery. 
\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The first two board", "-p /dev/ttyUSB0 --all\".split() # The first two board answers the ping board_answers =", "mock import * from msgpack import * import bootloader_read_config from commands import *", "print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs", "first two board answers the ping board_answers = [(b'', [0], i) for i", "bootloader_read_config from commands import * import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command')", "= {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command')", "packb({'id': i}) for i in range(1, 3) } bootloader_read_config.main() write_command.assert_any_call(open_conn.return_value, encode_ping(), list(range(1, 128)))", "if we can perform a whole network discovery. \"\"\" sys.argv = \"test.py -p", "/dev/ttyUSB0 --all\".split() # The first two board answers the ping board_answers = [(b'',", "indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry,", "sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs = [{'id': i} for", "write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i] for i in range(3)}", "2]) all_configs = {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection')", "= [(b'', [0], i) for i in range(1, 3)] + [None] read_can_datagram.return_value =", "* except ImportError: from mock import * from msgpack import * import bootloader_read_config", "@patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we can", "we can perform a whole network discovery. \"\"\" sys.argv = \"test.py -p /dev/ttyUSB0", "try: from unittest.mock import * except ImportError: from mock import * from msgpack", "i in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i:", "range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs =", "\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The first two board answers", "write_command_retry, open_conn): \"\"\" Checks if we can perform a whole network discovery. 
\"\"\"", "= iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for i in range(1, 3)", "@patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\"", "commands import * import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print')", "i) for i in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value =", "encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs,", "unittest.mock import * except ImportError: from mock import * from msgpack import *", "in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id':", "--all\".split() # The first two board answers the ping board_answers = [(b'', [0],", "except ImportError: from mock import * from msgpack import * import bootloader_read_config from", "perform a whole network discovery. \"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() #", "1 2\".split() configs = [{'id': i} for i in range(3)] write_command_retry.return_value = {", "+ [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for i", "ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv =", "class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv", "import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command,", "def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we can perform", "ImportError: from mock import * from msgpack import * import bootloader_read_config from commands", "network discovery. \"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The first two", "[{'id': i} for i in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i", "two board answers the ping board_answers = [(b'', [0], i) for i in", "a whole network discovery. 
\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The", "2\".split() configs = [{'id': i} for i in range(3)] write_command_retry.return_value = { i:", "answers the ping board_answers = [(b'', [0], i) for i in range(1, 3)]", "the ping board_answers = [(b'', [0], i) for i in range(1, 3)] +", "unittest try: from unittest.mock import * except ImportError: from mock import * from", "= { i: packb(configs[i]) for i in range(3) } open_conn.return_value = object() bootloader_read_config.main()", "0 1 2\".split() configs = [{'id': i} for i in range(3)] write_command_retry.return_value =", "test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we can perform a", "iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for i in range(1, 3) }", "from mock import * from msgpack import * import bootloader_read_config from commands import", "read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for i in range(1,", "i: packb({'id': i}) for i in range(1, 3) } bootloader_read_config.main() write_command.assert_any_call(open_conn.return_value, encode_ping(), list(range(1,", "{i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams')", "@patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0", "msgpack import * import bootloader_read_config from commands import * import sys import json", "i in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2])", "can perform a whole network discovery. 
\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split()", "packb(configs[i]) for i in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0,", "sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn):", "@patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks", "* import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self,", "i} for i in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i in", "-p /dev/ttyUSB0 0 1 2\".split() configs = [{'id': i} for i in range(3)]", "The first two board answers the ping board_answers = [(b'', [0], i) for", "sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The first two board answers the", "i in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i in range(3) }", "print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram, write_command,", "write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs = [{'id': i}", "/dev/ttyUSB0 0 1 2\".split() configs = [{'id': i} for i in range(3)] write_command_retry.return_value", "# The first two board answers the ping board_answers = [(b'', [0], i)", "@patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py", "import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock,", "import * import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def", "board_answers = [(b'', [0], i) for i in range(1, 3)] + [None] read_can_datagram.return_value", "\"test.py -p /dev/ttyUSB0 --all\".split() # The first two board answers the ping board_answers", "for i in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = {", "* from msgpack import * import bootloader_read_config from commands import * import sys", "read_can_datagram, write_command, write_command_retry, open_conn): \"\"\" Checks if we can perform a whole network", "from unittest.mock import * except ImportError: from mock import * from msgpack import", "\"\"\" Checks if we can perform a whole network discovery. \"\"\" sys.argv =", "whole network discovery. 
\"\"\" sys.argv = \"test.py -p /dev/ttyUSB0 --all\".split() # The first", "from commands import * import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection')", "range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock, read_can_datagram,", "in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs", "Checks if we can perform a whole network discovery. \"\"\" sys.argv = \"test.py", "import * from msgpack import * import bootloader_read_config from commands import * import", "write_command_retry.return_value = { i: packb({'id': i}) for i in range(1, 3) } bootloader_read_config.main()", "= object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1, 2]) all_configs = {i: configs[i] for", "1, 2]) all_configs = {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True))", "i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self,", "import bootloader_read_config from commands import * import sys import json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry')", "for i in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i in range(3)", "configs = [{'id': i} for i in range(3)] write_command_retry.return_value = { i: packb(configs[i])", "ping board_answers = [(b'', [0], i) for i in range(1, 3)] + [None]", "i: packb(configs[i]) for i in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(),", "= \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs = [{'id': i} for i", "[0], i) for i in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value", "open_conn): \"\"\" Checks if we can perform a whole network discovery. 
\"\"\" sys.argv", "3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id': i}) for", "@patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0", "import * except ImportError: from mock import * from msgpack import * import", "* import bootloader_read_config from commands import * import sys import json class ReadConfigToolTestCase(unittest.TestCase):", "[(b'', [0], i) for i in range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers)", "@patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p", "for i in range(3) } open_conn.return_value = object() bootloader_read_config.main() write_command_retry.assert_any_call(open_conn.return_value, encode_read_config(), [0, 1,", "range(1, 3)] + [None] read_can_datagram.return_value = iter(board_answers) write_command_retry.return_value = { i: packb({'id': i})", "\"test.py -p /dev/ttyUSB0 0 1 2\".split() configs = [{'id': i} for i in", "import unittest try: from unittest.mock import * except ImportError: from mock import *", "in range(3)] write_command_retry.return_value = { i: packb(configs[i]) for i in range(3) } open_conn.return_value", "in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.read_can_datagrams') @patch('builtins.print') def test_network_discovery(self, print_mock,", "write_command, write_command_retry, open_conn): \"\"\" Checks if we can perform a whole network discovery.", "all_configs = {i: configs[i] for i in range(3)} print_mock.assert_any_call(json.dumps(all_configs, indent=4, sort_keys=True)) @patch('utils.open_connection') @patch('utils.write_command_retry')", "def test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1", "write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split() configs = [{'id':", "= { i: packb({'id': i}) for i in range(1, 3) } bootloader_read_config.main() write_command.assert_any_call(open_conn.return_value,", "board answers the ping board_answers = [(b'', [0], i) for i in range(1,", "json class ReadConfigToolTestCase(unittest.TestCase): @patch('utils.write_command_retry') @patch('utils.write_command') @patch('utils.open_connection') @patch('builtins.print') def test_integration(self, print_mock, open_conn, write_command, write_command_retry):", "test_integration(self, print_mock, open_conn, write_command, write_command_retry): sys.argv = \"test.py -p /dev/ttyUSB0 0 1 2\".split()", "{ i: packb({'id': i}) for i in range(1, 3) } bootloader_read_config.main() write_command.assert_any_call(open_conn.return_value, encode_ping()," ]
[ "in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException):", "ctx.send(embed=msg) else: await ctx.send(msg) # Commands @bot.command() async def helplb(ctx): help_embed = discord.Embed(colour=discord.Color.from_rgb(54,", "requires 2 parameters.') return len(msg) > 2 @bot.command(name='list') @commands.check(check_if_two_args) async def list_(ctx, username,", "async for log_message in ctx.channel.history(limit=30): if log_message.author.id == bot.user.id and not found_bot_msg: bot_message", "*args): try: msg = await review_embed(username, ' '.join(str(i) for i in args)) except", "'') await bot.process_commands(message) async def update_stats(): while True: await bot.change_presence( activity=discord.Game('!helplb - {}", "def on_ready(): logging.info( 'Logged in %d servers as %s' % (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats())", "in SETTINGS['mkdb_servers']: msg = await film_embed(arg, True) else: msg = await film_embed(arg) except", "permission.'.format( ', '.join(err for err in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif", "msg = await diary_embed(username) except LetterboxdError as err: msg = err await send_msg(ctx,", "= discord.Embed(colour=discord.Color.from_rgb(54, 57, 62)) help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png') help_embed.set_author( name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg') help_embed.set_footer( text='Created by Porkepik#2664',", "await ctx.send('This command requires the {} permission.'.format( ', '.join(err for err in error.missing_perms)))", "'.join(str(i) for i in args)) except LetterboxdError as err: msg = err await", "await bot.process_commands(message) async def update_stats(): while True: await bot.change_presence( activity=discord.Game('!helplb - {} servers'.format(", "from user import user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s | %(message)s', datefmt='%m/%d %H:%M:%S') bot =", "break if found_usr_cmd: if not ctx.author.permissions_in(ctx.channel).manage_messages: if not cmd_message.author.id == ctx.author.id: return await", "msg = await list_embed(username, ' '.join(str(i) for i in args)) except LetterboxdError as", "send_msg(ctx, msg) @bot.command(name='del') @commands.bot_has_permissions(manage_messages=True) async def delete(ctx): await ctx.message.delete() found_bot_msg = False found_usr_cmd", "from film import film_embed from helpers import LetterboxdError from list_ import list_embed from", ":/') logging.error(ctx.message.content) raise error async def send_msg(ctx, msg): if isinstance(msg, discord.Embed): await ctx.send(embed=msg)", "except LetterboxdError as err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['movie']) async def", "await film_embed(arg) except LetterboxdError as err: msg = err await send_msg(ctx, msg) async", "msg = await film_embed(arg) except LetterboxdError as err: msg = err await send_msg(ctx,", "await send_msg(ctx, msg) @bot.command(aliases=['entry']) @commands.check(check_if_two_args) async def review(ctx, username, *args): try: msg =", "(commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError): if isinstance(error.original, discord.HTTPException): return else: await ctx.send('Sorry,", "@bot.event async def on_message(message): if 
message.content.startswith('!'): message.content = message.content.replace('’', '').replace('‘', '') await bot.process_commands(message)", "ctx.send('This command requires the {} permission.'.format( ', '.join(err for err in error.missing_perms))) elif", "% (len(bot.guilds), bot.user.name)) bot.loop.create_task(update_stats()) @bot.event async def on_message(message): if message.content.startswith('!'): message.content = message.content.replace('’',", "{} permission.'.format( ', '.join(err for err in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return", "discord.Embed): await ctx.send(embed=msg) else: await ctx.send(msg) # Commands @bot.command() async def helplb(ctx): help_embed", "= await diary_embed(username) except LetterboxdError as err: msg = err await send_msg(ctx, msg)", "username, *args): try: msg = await review_embed(username, ' '.join(str(i) for i in args))", "for err in error.missing_perms))) elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)): return elif isinstance(error, commands.CommandInvokeError): if", "await send_msg(ctx, msg) @bot.command(aliases=['movie']) async def film(ctx, *, arg): try: # eiga.me ratings", "def film(ctx, *, arg): try: # eiga.me ratings for specific servers if ctx.guild", "bot.commands: cmd_list.append('!' + command.name) for alias in command.aliases: cmd_list.append('!' + alias) async for", "text='Created by Porkepik#2664', icon_url='https://i.imgur.com/li4cLpd.png') for key, value in SETTINGS['help'].items(): help_embed.add_field(name=key, value=value, inline=False) help_embed.description", "review import review_embed from user import user_embed logging.basicConfig( level=logging.INFO, format='%(asctime)s | %(message)s', datefmt='%m/%d", "3: await ctx.send('This command requires 2 parameters.') return len(msg) > 2 @bot.command(name='list') @commands.check(check_if_two_args)", "def delete(ctx): await ctx.message.delete() found_bot_msg = False found_usr_cmd = False cmd_list = list()", "the command crashed. :/') logging.error(ctx.message.content) raise error async def send_msg(ctx, msg): if isinstance(msg,", "as err: msg = err await send_msg(ctx, msg) @bot.command(aliases=['actor', 'actress', 'director']) async def", "False cmd_list = list() for command in bot.commands: cmd_list.append('!' 
# Letterboxd Discord bot entry point (discord.py commands bot).
import logging
from asyncio import sleep

import discord
from discord.ext import commands

from config import SETTINGS
from crew import crew_embed
from diary import diary_embed
from film import film_embed
from helpers import LetterboxdError
from list_ import list_embed
from review import review_embed
from user import user_embed

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s | %(message)s',
    datefmt='%m/%d %H:%M:%S')
bot = commands.Bot(command_prefix='!', case_insensitive=True)
bot.remove_command('help')


@bot.event
async def on_ready():
    logging.info(
        'Logged in %d servers as %s' % (len(bot.guilds), bot.user.name))
    bot.loop.create_task(update_stats())


@bot.event
async def on_message(message):
    # Strip curly quotes from command invocations before processing them.
    if message.content.startswith('!'):
        message.content = message.content.replace('’', '').replace('‘', '')
    await bot.process_commands(message)


async def update_stats():
    # Refresh the presence with the current server count every 15 minutes.
    while True:
        await bot.change_presence(
            activity=discord.Game('!helplb - {} servers'.format(
                len(bot.guilds))))
        await sleep(900)


@bot.event
async def on_command_error(ctx, error):
    if isinstance(error, commands.MissingRequiredArgument):
        await ctx.send('This command requires a parameter.')
    elif isinstance(error, commands.BotMissingPermissions):
        await ctx.send('This command requires the {} permission.'.format(
            ', '.join(err for err in error.missing_perms)))
    elif isinstance(error, (commands.CommandNotFound, commands.CheckFailure)):
        return
    elif isinstance(error, commands.CommandInvokeError):
        if isinstance(error.original, discord.HTTPException):
            return
        else:
            await ctx.send('Sorry, the command crashed. :/')
            logging.error(ctx.message.content)
            raise error


async def send_msg(ctx, msg):
    # Successful lookups return an Embed; errors come back as plain strings.
    if isinstance(msg, discord.Embed):
        await ctx.send(embed=msg)
    else:
        await ctx.send(msg)


# Commands
@bot.command()
async def helplb(ctx):
    help_embed = discord.Embed(colour=discord.Color.from_rgb(54, 57, 62))
    help_embed.set_thumbnail(url='https://i.imgur.com/Kr1diFu.png')
    help_embed.set_author(
        name='Letterboxd Bot', icon_url='https://i.imgur.com/5VALKVy.jpg')
    help_embed.set_footer(
        text='Created by Porkepik#2664',
        icon_url='https://i.imgur.com/li4cLpd.png')
    for key, value in SETTINGS['help'].items():
        help_embed.add_field(name=key, value=value, inline=False)
    help_embed.description = 'Invite Bot | '\
        + '[GitHub](https://github.com/Porkepik/Letterboxd-Bot)'
    await ctx.send(embed=help_embed)


@bot.command()
async def user(ctx, username):
    try:
        msg = await user_embed(username)
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command()
async def diary(ctx, username):
    try:
        msg = await diary_embed(username)
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command(aliases=['actor', 'actress', 'director'])
async def crew(ctx, *, arg):
    try:
        msg = await crew_embed(arg, ctx.invoked_with)
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command(aliases=['movie'])
async def film(ctx, *, arg):
    try:
        # eiga.me ratings for specific servers
        if ctx.guild and ctx.guild.id in SETTINGS['mkdb_servers']:
            msg = await film_embed(arg, True)
        else:
            msg = await film_embed(arg)
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


async def check_if_two_args(ctx):
    # Shared check for !list and !review: both need a username plus one more argument.
    msg = ctx.message.content.split()
    if len(msg) < 3:
        await ctx.send('This command requires 2 parameters.')
    return len(msg) > 2


@bot.command(name='list')
@commands.check(check_if_two_args)
async def list_(ctx, username, *args):
    try:
        msg = await list_embed(username, ' '.join(str(i) for i in args))
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command(aliases=['entry'])
@commands.check(check_if_two_args)
async def review(ctx, username, *args):
    try:
        msg = await review_embed(username, ' '.join(str(i) for i in args))
    except LetterboxdError as err:
        msg = err
    await send_msg(ctx, msg)


@bot.command(name='del')
@commands.bot_has_permissions(manage_messages=True)
async def delete(ctx):
    await ctx.message.delete()
    found_bot_msg = False
    found_usr_cmd = False
    # Every invocation the bot recognises ('!name' and '!alias').
    cmd_list = list()
    for command in bot.commands:
        cmd_list.append('!' + command.name)
        for alias in command.aliases:
            cmd_list.append('!' + alias)
    # Walk recent history: find the bot's last message, then the user
    # command that triggered it.
    async for log_message in ctx.channel.history(limit=30):
        if log_message.author.id == bot.user.id and not found_bot_msg:
            bot_message = log_message
            found_bot_msg = True
        elif found_bot_msg:
            if log_message.content:
                first_word = log_message.content.split()[0]
            else:
                continue
            if first_word in cmd_list:
                found_usr_cmd = True
                cmd_message = log_message
                break
    if found_usr_cmd:
        # Users without manage_messages may only remove their own command.
        if not ctx.author.permissions_in(ctx.channel).manage_messages:
            if not cmd_message.author.id == ctx.author.id:
                return
        await cmd_message.delete()
    await bot_message.delete()


bot.run(SETTINGS['discord'])
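Everything environment-specific in the bot above comes from config.SETTINGS: the token passed to bot.run(), the field texts rendered by the !helplb embed, and the guild ids that receive eiga.me ratings in !film. The sketch below shows what such a config module could look like; only the keys 'discord', 'help' and 'mkdb_servers' appear in the bot code, and every value shown is a placeholder, not the real configuration.

# config.py (sketch): the SETTINGS mapping the bot imports.
SETTINGS = {
    # Bot token consumed by bot.run(SETTINGS['discord']).
    'discord': 'YOUR_DISCORD_BOT_TOKEN',
    # Field name -> description pairs rendered by the !helplb embed.
    'help': {
        '!user <username>': 'Show a Letterboxd profile.',
        '!film <title>': 'Show ratings and details for a film.',
    },
    # Guild ids whose !film results include eiga.me ratings.
    'mkdb_servers': [123456789012345678],
}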
[ "result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id,", "such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try:", "try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id:", "need to fix this later. \"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id", "-> ray.ObjectRef: \"\"\"Get the output of a running workflow. See \"api.get_output()\" for details.", "== StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery", "workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\")", "import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor", "= get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important to", "_) in all_failed])) return [(wid, obj) for (wid, obj) in ret if obj", "{workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get", "workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is important to", "this later. \"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id is None: #", "(k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and k not in runnings:", "runnings = set(runnings) # Here we don't have workflow id, so use empty", "result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs. def", "See \"api.get_output()\" for details. 
\"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError", "in runnings: s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return ret", "def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError:", "except ValueError: workflow_manager = None if workflow_manager is None: runnings = [] else:", "import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step", "ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger", "if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark):", "ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id:", "ValueError( \"Failed to connect to the workflow management \" \"actor. The workflow could", "e: raise ValueError( \"Failed to connect to the workflow management \" \"actor. The", "\"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as", "result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id,", "-> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The current \"run\" always overwrite", "already failed. You can use \" \"workflow.resume() to resume the workflow.\") from e", "except Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run(", "fix this later. \"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id is None:", "None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark):", "TYPE_CHECKING import uuid import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage", "we don't have workflow id, so use empty one instead store = workflow_storage.get_workflow_storage(\"\")", "workflow management \" \"actor. The workflow could have already failed. 
You can use", "k not in runnings: s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s))", "-> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try:", "entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support", "logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) ->", "assert ray.is_initialized() if workflow_id is None: # Workflow ID format: {Entry workflow UUID}.{Unix", "fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type", "[] for (k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and k not", "async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await", "resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def", "# checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor()", "= get_or_create_management_actor() # NOTE: It is important to 'ray.get' the returned output. This", "ID format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job", "storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is important to 'ray.get' the", "the output of a running workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized()", "list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager", "= False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if", "[] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1:", "use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s)", "import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import", "ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import", "workflow_manager = get_management_actor() except ValueError: workflow_manager = None if workflow_manager is None: runnings", "UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. 
[id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\")", "obj except Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid, None) ret =", "ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import", "result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef:", "workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager", "meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except", "empty one instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in", "output of a running workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try:", "running workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor()", "except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager", "ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str])", "filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor()", "get_or_create_management_actor() # NOTE: It is important to 'ray.get' the returned output. This #", "have workflow id, so use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret =", "get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except", "import Set, List, Tuple, Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow import", "time from typing import Set, List, Tuple, Optional, TYPE_CHECKING import uuid import ray", "ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor,", "name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a running workflow. 
See \"api.get_output()\"", "s in status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]:", "\" \"workflow.resume() to resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return", "WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool) ->", "Otherwise if the actor removes the reference of the # workflow output, the", "meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus]", "workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if", "= get_management_actor() except ValueError: workflow_manager = None if workflow_manager is None: runnings =", "raise RuntimeError(\"Failed to get management actor\") from e async def _resume_one(wid: str) ->", "ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\"", "and k not in runnings: s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k,", "# result. Otherwise if the actor removes the reference of the # workflow", "ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from", "the reference of the # workflow output, the caller may fail to resolve", "of the # workflow output, the caller may fail to resolve the result.", "storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE:", "try: workflow_manager = get_management_actor() except ValueError: workflow_manager = None if workflow_manager is None:", "str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\" for details. \"\"\" storage", "ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type !=", "workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\"", "except Exception: running = False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta", "the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing =", "logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is important", "logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool = True) ->", "list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as e: raise RuntimeError(\"Failed to get", "a workflow asynchronously. See \"api.resume()\" for details. 
\"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow", "get_management_actor() except ValueError: workflow_manager = None if workflow_manager is None: runnings = []", "= workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid, obj) for (wid,", "existing workflow. # We need to fix this later. \"\"\" store = get_global_storage()", "\"run\" always overwrite existing workflow. # We need to fix this later. \"\"\"", "Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid)", "# TODO(suquark): support recovery with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume", "to 'ray.get' the returned output. This # ensures caller of 'run()' holds the", "None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid, obj)", "is None: # Workflow ID format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id", "NOTE: It is important to 'ray.get' the returned output. This # ensures caller", "1: return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings) # Here", "(wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid,", "import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger =", "from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING:", "can use \" \"workflow.resume() to resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id,", "wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor()", "\"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id is None: # Workflow ID", "def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool = True) -> ray.ObjectRef:", "\"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() #", "= workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running", "runnings: s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return ret def", "if workflow_id is None: # Workflow ID format: {Entry workflow UUID}.{Unix time to", "reference to the workflow # result. 
Otherwise if the actor removes the reference", "resume workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _)", "None if workflow_manager is None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if", "in all_failed])) return [(wid, obj) for (wid, obj) in ret if obj is", "with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow)", "import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor)", "Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return", "to get management actor\") from e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]:", "ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]:", "# ensures caller of 'run()' holds the reference to the workflow # result.", "= get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) ->", "workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id)", "{WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except Exception", "-> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager = None if", "= ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def", "== WorkflowStatus.RUNNING and k not in runnings: s = WorkflowStatus.RESUMABLE if s in", "Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output)", "get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import", "WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r in", "storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws,", "WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import", "workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: 
workflow_manager", "result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj", "Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a running workflow. See \"api.get_output()\" for", "for (wid, _) in all_failed])) return [(wid, obj) for (wid, obj) in ret", "overwrite existing workflow. # We need to fix this later. \"\"\" store =", "ret.append((k, s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE}", "workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str)", "= get_management_actor() except ValueError as e: raise ValueError( \"Failed to connect to the", "commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE:", "bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set)", "to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job", "management actor\") from e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result:", "get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow:", "True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The current \"run\" always", "all_failed])) return [(wid, obj) for (wid, obj) in ret if obj is not", "-> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED))", "try: workflow_manager = get_management_actor() except Exception as e: raise RuntimeError(\"Failed to get management", "List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager", "f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is important to 'ray.get' the returned", "the workflow # result. Otherwise if the actor removes the reference of the", "def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. 
See \"api.resume()\" for details.", "= ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running: return WorkflowStatus.RUNNING store", "ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for", "ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id,", "workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING", "support recovery with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow", "s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if", "except ValueError as e: raise ValueError( \"Failed to connect to the workflow management", "as e: raise RuntimeError(\"Failed to get management actor\") from e async def _resume_one(wid:", "id, so use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for", "= set(runnings) # Here we don't have workflow id, so use empty one", "\"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It", "resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the", "meta = store.load_workflow_meta() if meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return", "Tuple, Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow", "the returned output. This # ensures caller of 'run()' holds the reference to", "to resume workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid,", "ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try:", "if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id:", "None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def", "the result. 
result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\")", "logging import time from typing import Set, List, Tuple, Optional, TYPE_CHECKING import uuid", "WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access", "workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running =", "Here we don't have workflow id, so use empty one instead store =", "Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager =", "resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id}", "wid, obj except Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid, None) ret", "List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager = None if workflow_manager", "raise ValueError( \"Failed to connect to the workflow management \" \"actor. The workflow", "= workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION)", "'run()' holds the reference to the workflow # result. Otherwise if the actor", "try: workflow_manager = get_management_actor() except ValueError as e: raise ValueError( \"Failed to connect", "None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) ->", "flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to resume workflow {wid}\") return", "to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id,", "store = get_global_storage() assert ray.is_initialized() if workflow_id is None: # Workflow ID format:", "workflow_manager is None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in", "workflow_manager = None if workflow_manager is None: runnings = [] else: runnings =", "logger.error(f\"Failed to resume workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for", "status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set =", "return [(wid, obj) for (wid, obj) in ret if obj is not None]", "output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try:", "returned output. 
This # ensures caller of 'run()' holds the reference to the", "[id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id)", "import time from typing import Set, List, Tuple, Optional, TYPE_CHECKING import uuid import", "WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool", "TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str]", "It is important to 'ray.get' the returned output. This # ensures caller of", "store.load_workflow_meta() if meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def", "return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return", "= workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None: raise ValueError(f\"No such workflow_id", "ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\" for details. \"\"\" storage = get_global_storage()", "f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\",", "if meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter:", "for r in runnings] runnings = set(runnings) # Here we don't have workflow", "workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid, obj) for (wid, obj)", "recovery with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously.", "in runnings] runnings = set(runnings) # Here we don't have workflow id, so", "len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings)", "StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import (", "if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as", "result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return", "def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed", "workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): #", "created. 
[id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws =", "s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return ret def resume_all(with_failed:", "uuid import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common", "return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None: raise", "set(runnings) # Here we don't have workflow id, so use empty one instead", "= get_global_storage() assert ray.is_initialized() if workflow_id is None: # Workflow ID format: {Entry", "else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs. def resume(workflow_id:", "return wid, obj except Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid, None)", "job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef:", "ValueError as e: raise ValueError( \"Failed to connect to the workflow management \"", "_resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj", "\"\"\"Resume a workflow asynchronously. See \"api.resume()\" for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming", "to the workflow management \" \"actor. The workflow could have already failed. You", "str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception:", "Optional[str] = None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously.", "return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef", "result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a", "flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except", "result. Otherwise if the actor removes the reference of the # workflow output,", "asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid, obj) for (wid, obj) in", "f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the", "ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The current \"run\" always overwrite existing", "in status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set", "ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running: return WorkflowStatus.RUNNING store =", "get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a running workflow.", "details. 
\"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as e: raise", "ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager =", "caller may fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing))", "entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is", "= workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in store.list_workflow(): if s ==", "ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager =", "cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store =", "This # ensures caller of 'run()' holds the reference to the workflow #", "\"workflow.resume() to resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id,", "if the actor removes the reference of the # workflow output, the caller", "= True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The current \"run\"", "from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult", "workflow_id: Optional[str] = None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a workflow", "to resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output)", "ray.ObjectRef: \"\"\"Get the output of a running workflow. See \"api.get_output()\" for details. \"\"\"", "store.storage_url): # checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager =", "= ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return", "try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid,", "logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool =", "\"\"\"Run a workflow asynchronously. 
# TODO(suquark): The current \"run\" always overwrite existing workflow.", "workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important", "raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str,", "None: # Workflow ID format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id =", "workflow_id is None: # Workflow ID format: {Entry workflow UUID}.{Unix time to nanoseconds}", "wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get(", "output) def cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError:", "ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow,", "flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs. def resume(workflow_id: str) ->", "workflow {wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in", "a workflow asynchronously. # TODO(suquark): The current \"run\" always overwrite existing workflow. #", "import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite:", "from typing import Set, List, Tuple, Optional, TYPE_CHECKING import uuid import ray from", "from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] =", "await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to", "str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a running workflow. See", "import uuid import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from", "def get_status(workflow_id: str) -> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id))", "import asyncio import logging import time from typing import Set, List, Tuple, Optional,", "except Exception as e: raise RuntimeError(\"Failed to get management actor\") from e async", "if s == WorkflowStatus.RUNNING and k not in runnings: s = WorkflowStatus.RESUMABLE if", "str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj =", "output. This # ensures caller of 'run()' holds the reference to the workflow", "Workflow, workflow_id: Optional[str] = None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a", "status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings", "# NOTE: It is important to 'ray.get' the returned output. 
This # ensures", "ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData,", "r in runnings] runnings = set(runnings) # Here we don't have workflow id,", "ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED)", "flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs.", "workflow_manager = get_management_actor() except ValueError as e: raise ValueError( \"Failed to connect to", "if workflow_manager is None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING", "if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r", "failed. You can use \" \"workflow.resume() to resume the workflow.\") from e output", "runnings] runnings = set(runnings) # Here we don't have workflow id, so use", "commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if", "and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings =", "ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) #", "\"api.resume()\" for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager", "e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None:", "# Workflow ID format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\"", "logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow", "s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and k not in runnings: s", "time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with", "def cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store", "asyncio import logging import time from typing import Set, List, Tuple, Optional, TYPE_CHECKING", "inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\" for", "workflow. # We need to fix this later. \"\"\" store = get_global_storage() assert", "return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor()", "the caller may fail to resolve the result. 
result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id,", "is None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter", "workflow could have already failed. You can use \" \"workflow.resume() to resume the", "None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and", "caller may fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id,", "\"Failed to connect to the workflow management \" \"actor. The workflow could have", "import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType)", "= {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except", "-> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid,", "the workflow management \" \"actor. The workflow could have already failed. You can", "with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See", "to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type ==", "= None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. #", "Exception: running = False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta =", "use \" \"workflow.resume() to resume the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name))", "is important to 'ray.get' the returned output. This # ensures caller of 'run()'", "typing import Set, List, Tuple, Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow", "workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from", "later. \"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id is None: # Workflow", "nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url):", "= ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try: workflow_manager", "don't have workflow id, so use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret", "current \"run\" always overwrite existing workflow. # We need to fix this later.", "a running workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try: workflow_manager =", "# workflow output, the caller may fail to resolve the result. 
result: \"WorkflowExecutionResult\"", "format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created.", "= get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running:", "workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) #", "\"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output)", "return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output", "get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running: return", "try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False", "from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage", "ValueError: workflow_manager = None if workflow_manager is None: runnings = [] else: runnings", "WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager = None if workflow_manager is", "name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor()", "return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings) # Here we", "get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is", "from e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" =", "workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name:", "e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = (", "the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return", "== 1: return [(r, WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings) #", "List, Tuple, Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow import workflow_context from", "workflow asynchronously. # TODO(suquark): The current \"run\" always overwrite existing workflow. # We", "WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings) # Here we don't have", "Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow import workflow_context from ray.experimental.workflow import", "ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important to 'ray.get' the", "to fix this later. 
\"\"\" store = get_global_storage() assert ray.is_initialized() if workflow_id is", "actor removes the reference of the # workflow output, the caller may fail", "WorkflowStatus.RUNNING and k not in runnings: s = WorkflowStatus.RESUMABLE if s in status_filter:", "<gh_stars>0 import asyncio import logging import time from typing import Set, List, Tuple,", "[(r, WorkflowStatus.RUNNING) for r in runnings] runnings = set(runnings) # Here we don't", "store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None: raise ValueError(f\"No such", "{Entry workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\",", "WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None: raise ValueError(f\"No", "if s in status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool) -> List[Tuple[str,", "TODO(suquark): The current \"run\" always overwrite existing workflow. # We need to fix", "to connect to the workflow management \" \"actor. The workflow could have already", "filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as e: raise", "assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as e: raise ValueError( \"Failed", "\"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else:", "ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None,", "# TODO(suquark): The current \"run\" always overwrite existing workflow. # We need to", "return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with ObjectRef inputs. def resume(workflow_id: str)", "= get_management_actor() except Exception as e: raise RuntimeError(\"Failed to get management actor\") from", "= ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING)", "workflow_manager = get_or_create_management_actor() # NOTE: It is important to 'ray.get' the returned output.", "management \" \"actor. The workflow could have already failed. You can use \"", "asynchronously. # TODO(suquark): The current \"run\" always overwrite existing workflow. # We need", "for (k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and k not in", "the # workflow output, the caller may fail to resolve the result. 
result:", "runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter)", ") -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager = get_management_actor() except ValueError: workflow_manager = None", "(entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important to 'ray.get' the returned output.", "as e: raise ValueError( \"Failed to connect to the workflow management \" \"actor.", "store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in store.list_workflow(): if s", "str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id)", "workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output)", "= None if workflow_manager is None: runnings = [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote())", "the actor removes the reference of the # workflow output, the caller may", "(Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from", "TODO(suquark): support recovery with ObjectRef inputs. def resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a", "(wid, _) in all_failed])) return [(wid, obj) for (wid, obj) in ret if", "-> Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running", "get_management_actor() except Exception as e: raise RuntimeError(\"Failed to get management actor\") from e", "is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] )", "all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as e: raise RuntimeError(\"Failed", "overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The", "import logging import time from typing import Set, List, Tuple, Optional, TYPE_CHECKING import", "= f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint", "get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important to 'ray.get'", "job created. [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{store.storage_url}\\\"].\") with workflow_context.workflow_step_context(workflow_id, store.storage_url): # checkpoint the workflow ws", "the reference to the workflow # result. Otherwise if the actor removes the", "not in runnings: s = WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return", "one instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in store.list_workflow():", "actor\") from e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\"", "resolve the result. 
result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION:", "= get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It", "= (entry_workflow.data.step_type != StepType.FUNCTION) # NOTE: It is important to 'ray.get' the returned", "workflow id, so use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret = []", "result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to resume workflow {wid}\") return (wid,", "get_global_storage() assert ray.is_initialized() if workflow_id is None: # Workflow ID format: {Entry workflow", "in store.list_workflow(): if s == WorkflowStatus.RUNNING and k not in runnings: s =", "caller of 'run()' holds the reference to the workflow # result. Otherwise if", "run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool = True) -> ray.ObjectRef: \"\"\"Run", "workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from", "get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def run(entry_workflow: Workflow,", "return ret def resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed:", "from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage from ray.experimental.workflow.workflow_access import ( flatten_workflow_output,", "s == WorkflowStatus.RUNNING and k not in runnings: s = WorkflowStatus.RESUMABLE if s", "= [] else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) ==", "instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k, s) in store.list_workflow(): if", "-> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\" for details. \"\"\" storage =", "important to 'ray.get' the returned output. This # ensures caller of 'run()' holds", "RuntimeError(\"Failed to get management actor\") from e async def _resume_one(wid: str) -> Tuple[str,", "resume(workflow_id: str) -> ray.ObjectRef: \"\"\"Resume a workflow asynchronously. See \"api.resume()\" for details. \"\"\"", "ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try: workflow_manager =", "{wid}\") return (wid, None) ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed]))", "e: raise RuntimeError(\"Failed to get management actor\") from e async def _resume_one(wid: str)", "ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as e: raise ValueError( \"Failed to", "may fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False))", "ret = [] for (k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and", "fail to resolve the result. 
result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow", "resume_all(with_failed: bool) -> List[Tuple[str, ray.ObjectRef]]: filter_set = {WorkflowStatus.RESUMABLE} if with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed =", "'ray.get' the returned output. This # ensures caller of 'run()' holds the reference", "\" \"actor. The workflow could have already failed. You can use \" \"workflow.resume()", "removes the reference of the # workflow output, the caller may fail to", "holds the reference to the workflow # result. Otherwise if the actor removes", "for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager =", "have already failed. You can use \" \"workflow.resume() to resume the workflow.\") from", "workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str)", "= logging.getLogger(__name__) def run(entry_workflow: Workflow, workflow_id: Optional[str] = None, overwrite: bool = True)", "= store.load_workflow_meta() if meta is None: raise ValueError(f\"No such workflow_id {workflow_id}\") return meta.status", "may fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if", "( flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__)", "We need to fix this later. \"\"\" store = get_global_storage() assert ray.is_initialized() if", "Workflow ID format: {Entry workflow UUID}.{Unix time to nanoseconds} workflow_id = f\"{str(uuid.uuid4())}.{time.time():.9f}\" logger.info(f\"Workflow", "from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor", "ret = workflow_storage.asyncio_run( asyncio.gather(*[_resume_one(wid) for (wid, _) in all_failed])) return [(wid, obj) for", "The workflow could have already failed. You can use \" \"workflow.resume() to resume", "StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output) else: return flatten_workflow_output(workflow_id, result.volatile_output) # TODO(suquark): support recovery with", "details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor()", "import (Workflow, WorkflowStatus, WorkflowMetaData, StepType) from ray.experimental.workflow.step_executor import commit_step from ray.experimental.workflow.storage import get_global_storage", "workflow output, the caller may fail to resolve the result. result: \"WorkflowExecutionResult\" =", "flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of", "connect to the workflow management \" \"actor. 
The workflow could have already failed.", "running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is None:", "running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running = False if running: return WorkflowStatus.RUNNING", "the workflow.\") from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id:", "= ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception:", "if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta is", "( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed", "Set, List, Tuple, Optional, TYPE_CHECKING import uuid import ray from ray.experimental.workflow import workflow_context", "[id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\") workflow_manager = get_or_create_management_actor() # NOTE: It is important to 'ray.get'", "workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to resume", "could have already failed. You can use \" \"workflow.resume() to resume the workflow.\")", "asynchronously. See \"api.resume()\" for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\"", "with_failed: filter_set.add(WorkflowStatus.FAILED) all_failed = list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as e:", "reference of the # workflow output, the caller may fail to resolve the", "{workflow_id}\") return meta.status def list_all(status_filter: Set[WorkflowStatus] ) -> List[Tuple[str, WorkflowStatus]]: try: workflow_manager =", "!= StepType.FUNCTION) # NOTE: It is important to 'ray.get' the returned output. This", "runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return [(r,", "StepType.FUNCTION) # NOTE: It is important to 'ray.get' the returned output. This #", "\"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as e: raise ValueError(", "else: runnings = ray.get(workflow_manager.list_running_workflow.remote()) if WorkflowStatus.RUNNING in status_filter and len(status_filter) == 1: return", "of a running workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try: workflow_manager", "\"\"\"Get the output of a running workflow. See \"api.get_output()\" for details. 
\"\"\" assert", "Exception as e: raise RuntimeError(\"Failed to get management actor\") from e async def", "result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(workflow_id, ignore_existing)) if entry_workflow.data.step_type == StepType.FUNCTION: return flatten_workflow_output(workflow_id, result.persisted_output)", "from e output = ray.get(workflow_manager.get_output.remote(workflow_id, name)) return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) ->", "# Here we don't have workflow id, so use empty one instead store", "store.list_workflow(): if s == WorkflowStatus.RUNNING and k not in runnings: s = WorkflowStatus.RESUMABLE", "\"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid)) obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except", "= list_all(filter_set) try: workflow_manager = get_management_actor() except Exception as e: raise RuntimeError(\"Failed to", "output, the caller may fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get(", "See \"api.resume()\" for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\", storage_url=\" f\"\\\"{storage.storage_url}\\\"].\")", "def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try: result: \"WorkflowExecutionResult\" = ( await workflow_manager.run_or_resume.remote(wid))", "to the workflow # result. Otherwise if the actor removes the reference of", "workflow # result. Otherwise if the actor removes the reference of the #", "in status_filter and len(status_filter) == 1: return [(r, WorkflowStatus.RUNNING) for r in runnings]", "False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta() if meta", "so use empty one instead store = workflow_storage.get_workflow_storage(\"\") ret = [] for (k,", "get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id)) except ValueError: wf_store = workflow_storage.get_workflow_storage(workflow_id) wf_store.save_workflow_meta(WorkflowMetaData(WorkflowStatus.CANCELED)) def get_status(workflow_id: str) -> Optional[WorkflowStatus]:", "flatten_workflow_output, get_or_create_management_actor, get_management_actor) if TYPE_CHECKING: from ray.experimental.workflow.step_executor import WorkflowExecutionResult logger = logging.getLogger(__name__) def", "workflow. See \"api.get_output()\" for details. \"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except", "ensures caller of 'run()' holds the reference to the workflow # result. Otherwise", "workflow_manager.run_or_resume.remote( workflow_id, ignore_existing=False)) logger.info(f\"Workflow job {workflow_id} resumed.\") return flatten_workflow_output(workflow_id, result.persisted_output) def get_output(workflow_id: str,", "get management actor\") from e async def _resume_one(wid: str) -> Tuple[str, Optional[ray.ObjectRef]]: try:", "of 'run()' holds the reference to the workflow # result. 
Otherwise if the", "def get_output(workflow_id: str, name: Optional[str]) -> ray.ObjectRef: \"\"\"Get the output of a running", "from ray.experimental.workflow import workflow_context from ray.experimental.workflow import workflow_storage from ray.experimental.workflow.common import (Workflow, WorkflowStatus,", "= [] for (k, s) in store.list_workflow(): if s == WorkflowStatus.RUNNING and k", "checkpoint the workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing", "Optional[WorkflowStatus]: try: workflow_manager = get_management_actor() running = ray.get( workflow_manager.is_workflow_running.remote(workflow_id)) except Exception: running =", "bool = True) -> ray.ObjectRef: \"\"\"Run a workflow asynchronously. # TODO(suquark): The current", "workflow asynchronously. See \"api.resume()\" for details. \"\"\" storage = get_global_storage() logger.info(f\"Resuming workflow [id=\\\"{workflow_id}\\\",", "always overwrite existing workflow. # We need to fix this later. \"\"\" store", "the caller may fail to resolve the result. result: \"WorkflowExecutionResult\" = ray.get( workflow_manager.run_or_resume.remote(", "= WorkflowStatus.RESUMABLE if s in status_filter: ret.append((k, s)) return ret def resume_all(with_failed: bool)", "for details. \"\"\" assert ray.is_initialized() try: workflow_manager = get_management_actor() except ValueError as e:", "workflow_manager = get_management_actor() except Exception as e: raise RuntimeError(\"Failed to get management actor\")", "return flatten_workflow_output(workflow_id, output) def cancel(workflow_id: str) -> None: try: workflow_manager = get_management_actor() ray.get(workflow_manager.cancel_workflow.remote(workflow_id))", "# We need to fix this later. \"\"\" store = get_global_storage() assert ray.is_initialized()", "\"actor. The workflow could have already failed. You can use \" \"workflow.resume() to", "obj = flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to resume workflow", "running = False if running: return WorkflowStatus.RUNNING store = workflow_storage.get_workflow_storage(workflow_id) meta = store.load_workflow_meta()", "The current \"run\" always overwrite existing workflow. # We need to fix this", "You can use \" \"workflow.resume() to resume the workflow.\") from e output =", "ray.is_initialized() if workflow_id is None: # Workflow ID format: {Entry workflow UUID}.{Unix time", "workflow ws = workflow_storage.get_workflow_storage(workflow_id) commit_step(ws, \"\", entry_workflow) workflow_manager = get_or_create_management_actor() ignore_existing = (entry_workflow.data.step_type", "get_management_actor() except ValueError as e: raise ValueError( \"Failed to connect to the workflow", "= flatten_workflow_output(wid, result.persisted_output) return wid, obj except Exception: logger.error(f\"Failed to resume workflow {wid}\")" ]
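Taken together, the functions above are the driver side of the experimental workflow API. The snippet below is a minimal sketch of how they are meant to be combined from user code; it assumes the module is importable as ray.experimental.workflow.execution (the module path is not visible in this record) and that workflows were previously created and checkpointed through the step API.

import ray
from ray.experimental.workflow import execution  # assumed module path
from ray.experimental.workflow.common import WorkflowStatus

ray.init()

# List every workflow the storage backend knows about, regardless of status.
for wid, status in execution.list_all(set(WorkflowStatus)):
    print(wid, status)

# Restart every resumable (and previously failed) workflow, then block on
# each output reference returned by resume_all().
for wid, output_ref in execution.resume_all(with_failed=True):
    print(wid, ray.get(output_ref))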
[ "from .compose import Compose # noqa from .signature import BinarySignature, Signature, MemorySignature #", ".compose import Compose # noqa from .signature import BinarySignature, Signature, MemorySignature # noqa", "from .signature import BinarySignature, Signature, MemorySignature # noqa from .username import UsernameToken #", "Compose # noqa from .signature import BinarySignature, Signature, MemorySignature # noqa from .username", "noqa from .signature import BinarySignature, Signature, MemorySignature # noqa from .username import UsernameToken", "<reponame>bertonha/python-zeep from .compose import Compose # noqa from .signature import BinarySignature, Signature, MemorySignature", "# noqa from .signature import BinarySignature, Signature, MemorySignature # noqa from .username import", "import Compose # noqa from .signature import BinarySignature, Signature, MemorySignature # noqa from", ".signature import BinarySignature, Signature, MemorySignature # noqa from .username import UsernameToken # noqa" ]
[ "1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return", "y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y = sol_rk4.T", "= u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt", "f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan):", "(1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10,", "k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)):", "return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)): y[k,:]", "import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11", "<reponame>peteboi/Python-Scripts<gh_stars>0 # -*- coding: utf-8 -*- import numpy as np import matplotlib.pyplot as", "RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for", "dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y = sol_rk4.T plt.grid()", "orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def", "-*- coding: utf-8 -*- import numpy as np import matplotlib.pyplot as plt def", "M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def", "u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)): y[k,:] =", "for k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t =", "def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0", "-*- import numpy as np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y =", "return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y =", "x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt):", "numpy as np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y)", "np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:]", "k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt)", "utf-8 -*- import numpy as np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y", "= RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t)", "y[0,:] =y0 for k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1", "=y0 for k in range (1,len(tspan)): y[k,:] = 
RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t", "y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10])", "as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3", "k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range", "RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y", "import numpy as np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u", "matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110", "def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1])", "y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y", "r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt", "#r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt", "as np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r=", "k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k", "= np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y = sol_rk4.T plt.grid() plt.plot(x,y) plt.show()", "coding: utf-8 -*- import numpy as np import matplotlib.pyplot as plt def orbit(u):", "return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)])", "t = np.arange(0,10,dt) y0=np.array([10, 0.0, 10, 10]) sol_rk4=RK4_int(orbit,y0,t) x,y,v_x,v_y = sol_rk4.T plt.grid() plt.plot(x,y)", "plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return", "in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10,", "range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return y dt=0.1 t = np.arange(0,10,dt) y0=np.array([10, 0.0,", "def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y])", "RK4_int(f,y0,tspan): y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in range (1,len(tspan)): y[k,:] = RK4(f,y[k-1],tspan[k]-tspan[k-1]) return", "np import matplotlib.pyplot as plt def orbit(u): x,y,v_x,v_y = u r=np.hypot(x,y) #r= 1.521e+06", "u r=np.hypot(x,y) #r= 1.521e+06 #M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt", "# -*- coding: utf-8 -*- import numpy as np import matplotlib.pyplot as plt", "#M,G=1.989e+30,6.7e-11 M,G=20,110 f=G*M/r**3 return np.array([v_x,v_y,-f*x,-f*y]) def RK4(f,u,dt): k1=f(u)*dt k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6", "k2=f(u+0.5*k1)*dt k3=f(u+0.5*k2)*dt k4=f(u+k3)*dt return u+(k1+2*k2+2*k3+k4)/6 def RK4_int(f,y0,tspan): 
y=np.zeros([len(tspan),len(y0)]) y[0,:] =y0 for k in" ]
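The script has no checks of its own, so here is a small sanity test (not part of the original) that runs the same RK4_int on du/dt = -u, whose exact solution exp(-t) is known; this verifies the integrator independently of the orbit problem.

import numpy as np

def decay(u):
    # du/dt = -u has the exact solution u(t) = u(0) * exp(-t)
    return -u

t_check = np.arange(0, 5, 0.1)
u_num = RK4_int(decay, np.array([1.0]), t_check)[:, 0]
max_err = np.max(np.abs(u_num - np.exp(-t_check)))
print(f"max RK4 error vs exp(-t): {max_err:.2e}")  # expected well below 1e-5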
[ "factories for all installed apps' def handle(self, *args, **options): created_files = [] for", "apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in", "<reponame>gamabounty/django-factory-generator import os from django.apps import apps from django.core.management.base import BaseCommand from factory_generator.generator", "import os from django.apps import apps from django.core.management.base import BaseCommand from factory_generator.generator import", "factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories for all installed", "= FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files: self.stdout.write(self.style.SUCCESS('-", "+= factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files: self.stdout.write(self.style.SUCCESS('- ' + created_file))", "FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories for all installed apps' def", "FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files: self.stdout.write(self.style.SUCCESS('- '", "installed apps' def handle(self, *args, **options): created_files = [] for app in apps.get_app_configs():", "created_files = [] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files()", "for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:'))", "'Create model factories for all installed apps' def handle(self, *args, **options): created_files =", "= [] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully", "django.apps import apps from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand):", "all installed apps' def handle(self, *args, **options): created_files = [] for app in", "import FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories for all installed apps'", "from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories for all", "app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for", "factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files:", "help = 'Create model factories for all installed apps' def handle(self, *args, **options):", "apps from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help =", "Command(BaseCommand): help = 'Create model 
factories for all installed apps' def handle(self, *args,", "for all installed apps' def handle(self, *args, **options): created_files = [] for app", "class Command(BaseCommand): help = 'Create model factories for all installed apps' def handle(self,", "def handle(self, *args, **options): created_files = [] for app in apps.get_app_configs(): factory_app_generator =", "*args, **options): created_files = [] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files", "[] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created", "handle(self, *args, **options): created_files = [] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app)", "django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create model", "in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file", "created_files += factory_app_generator.create_files() self.stdout.write(self.style.SUCCESS('Successfully created factories:')) for created_file in created_files: self.stdout.write(self.style.SUCCESS('- ' +", "**options): created_files = [] for app in apps.get_app_configs(): factory_app_generator = FactoryAppGenerator(app) created_files +=", "import apps from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help", "from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create", "from django.apps import apps from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator class", "import BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories", "model factories for all installed apps' def handle(self, *args, **options): created_files = []", "BaseCommand from factory_generator.generator import FactoryAppGenerator class Command(BaseCommand): help = 'Create model factories for", "= 'Create model factories for all installed apps' def handle(self, *args, **options): created_files", "apps' def handle(self, *args, **options): created_files = [] for app in apps.get_app_configs(): factory_app_generator", "os from django.apps import apps from django.core.management.base import BaseCommand from factory_generator.generator import FactoryAppGenerator" ]
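For context (this is an assumption, not code from django-factory-generator itself), the files written by FactoryAppGenerator.create_files() are typically factory_boy factories along these lines; the app and model names below are hypothetical.

import factory

from myapp.models import Author  # hypothetical model


class AuthorFactory(factory.django.DjangoModelFactory):
    class Meta:
        model = Author

    name = factory.Faker("name")
    email = factory.Faker("email")

The command itself is invoked like any other Django management command; its name comes from the command module's filename, which is not visible in this record.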
[ "chunks, 'join': {'__mem_gb' : 1}} def main(args, outs): if args.skip: return matrix =", "lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why this stage takes > 1", "h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using ( in", "NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb,", "{'__mem_gb' : 1}} def main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) #", "def join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out", "= [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in", "in h5 clustering_h5, in bool skip, in int random_seed, in int max_clusters, out", "cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants", "big jobs more threads in order to avoid overloading a node threads =", "'__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args, outs):", "= matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as", "for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) #", "as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as cr_io import cellranger.library_constants as", "cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as cr_io import cellranger.library_constants", "( in string clustering_key, ) \"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb':", "one for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB))", "split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add", "cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE)", "= [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs]", "4 #TODO Not clear why this stage takes > 1 thread. 
Martian thinks", "in string clustering_key, ) \"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]}", "threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key':", "chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP,", "Copyright (c) 2017 10X Genomics, Inc. All rights reserved. # import cellranger.analysis.diffexp as", "skip, in int random_seed, in int max_clusters, out h5 diffexp_h5, out path diffexp_csv,", ") \"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = []", "in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks,", "- give big jobs more threads in order to avoid overloading a node", "# # Copyright (c) 2017 10X Genomics, Inc. All rights reserved. # import", "using ( in string clustering_key, ) \"\"\" def split(args): if args.skip: return {'chunks':", "src py \"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key, ) \"\"\" def", "return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one for reasons", "def main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only", "in h5 matrix_h5, in h5 clustering_h5, in bool skip, in int random_seed, in", "4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, })", "as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as", "cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io", "out h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using (", "overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in", "cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if", "more threads in order to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN)", "chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs more threads in", "import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import", "cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants", "h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as cr_io", "NUM_THREADS_MIN = 4 #TODO Not clear why this stage takes > 1 thread.", "= 
cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene expression features matrix =", "{'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one for reasons unknown", "stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in bool skip, in int", "outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for", "as cr_matrix import cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4", "= [] # FIXME: Add one for reasons unknown matrix_mem_gb = 1.8 *", "if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv", "clustering_h5, in bool skip, in int random_seed, in int max_clusters, out h5 diffexp_h5,", "Inc. All rights reserved. # import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io", "# FIXME: Add one for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb", "{'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args, outs): if args.skip: return matrix", "avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key", "= \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in bool skip,", "import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as cr_io import", "threads, }) return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args, outs): if", "int max_clusters, out h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split", "= int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs more threads in order", "cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv)", "diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip: return", "#!/usr/bin/env python # # Copyright (c) 2017 10X Genomics, Inc. 
All rights reserved.", "and kills it on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5", "main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute", "does and kills it on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in", "analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs,", "}) return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args, outs): if args.skip:", "out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key,", "= cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix,", "2017 10X Genomics, Inc. All rights reserved. # import cellranger.analysis.diffexp as cr_diffexp import", "#TODO Not clear why this stage takes > 1 thread. Martian thinks it", "it does and kills it on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION(", "rights reserved. # import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome", "matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene expression features matrix", "import cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not", "max_clusters, out h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using", "as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why", "cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix", "= 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big", "in order to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads =", "'join': {'__mem_gb' : 1}} def main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5)", "For now, only compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering =", "import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import", "split using ( in string clustering_key, ) \"\"\" def split(args): if args.skip: return", "return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args, outs): if args.skip: return", "1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs", "f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs):", "python # # Copyright (c) 2017 10X Genomics, Inc. 
All rights reserved. #", "min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb':", "HACK - give big jobs more threads in order to avoid overloading a", "SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp)", "import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import", "diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key, ) \"\"\"", "(c) 2017 10X Genomics, Inc. All rights reserved. # import cellranger.analysis.diffexp as cr_diffexp", "random_seed, in int max_clusters, out h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\",", "h5 clustering_h5, in bool skip, in int random_seed, in int max_clusters, out h5", "= 4 #TODO Not clear why this stage takes > 1 thread. Martian", "as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why this stage takes >", "outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs]", "threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads':", "__MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in bool", "analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants", ") split using ( in string clustering_key, ) \"\"\" def split(args): if args.skip:", "takes > 1 thread. Martian thinks it does and kills it on long", "chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in", "Not clear why this stage takes > 1 thread. Martian thinks it does", "'__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def", "why this stage takes > 1 thread. Martian thinks it does and kills", "\"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in bool skip, in", "matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5", "in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in chunk_csv_dirs: cr_io.copytree(csv_dir, outs.diffexp_csv, allow_existing=True)", "thread. Martian thinks it does and kills it on long jobs __MRO__ =", "\"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] #", "args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for", "# For now, only compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering", "Genomics, Inc. All rights reserved. 
# import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as", "for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return", "outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for", "h5_constants.MIN_MEM_GB)) # HACK - give big jobs more threads in order to avoid", "long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5,", "args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one for", "= min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key,", "<reponame>qiangli/cellranger<filename>mro/stages/analyzer/run_differential_expression/__init__.py #!/usr/bin/env python # # Copyright (c) 2017 10X Genomics, Inc. All rights", "matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give", "# HACK - give big jobs more threads in order to avoid overloading", "chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in chunk_csv_dirs: cr_io.copytree(csv_dir, outs.diffexp_csv,", "return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene expression features", "# Copyright (c) 2017 10X Genomics, Inc. All rights reserved. # import cellranger.analysis.diffexp", "import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why this stage", "[{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one for reasons unknown matrix_mem_gb", "= 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads,", "args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene expression", "matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f:", "cellranger.matrix as cr_matrix import cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN =", "path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key, )", "if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one", "in int max_clusters, out h5 diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", )", "chunks = [] # FIXME: Add one for reasons unknown matrix_mem_gb = 1.8", "matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5)", "in int random_seed, in int 
max_clusters, out h5 diffexp_h5, out path diffexp_csv, src", "[chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s,", "node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({", "for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in chunk_csv_dirs: cr_io.copytree(csv_dir,", "clustering_key, ) \"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks =", "SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join':", "def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME:", "a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5):", "[] # FIXME: Add one for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5)", "import cellranger.matrix as cr_matrix import cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN", "All rights reserved. # import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from", "RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in bool skip, in int random_seed,", "jobs more threads in order to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb),", "as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs,", "analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as cr_io import cellranger.library_constants as lib_constants", "chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs =", "reserved. 
# import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import", "kills it on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5,", "= SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key,", "SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix", "chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for", "args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip:", "as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as", "if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now, only compute for gene", "py \"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key, ) \"\"\" def split(args):", "order to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4", "int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs more threads in order to", "unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK -", "> 1 thread. Martian thinks it does and kills it on long jobs", "clear why this stage takes > 1 thread. Martian thinks it does and", "* cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs more", "give big jobs more threads in order to avoid overloading a node threads", "reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK", "diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s =", ": 1}} def main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For", "cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb, h5_constants.MIN_MEM_GB)) # HACK - give big jobs more threads", "1 thread. 
Martian thinks it does and kills it on long jobs __MRO__", "compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp", "diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp,", "# import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis", "diffexp_h5, out path diffexp_csv, src py \"stages/analyzer/run_differential_expression\", ) split using ( in string", "clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f,", "import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as", "cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear", "cr_diffexp import cellranger.analysis.io as analysis_io from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants", "as h5_constants import cellranger.analysis.constants as analysis_constants import cellranger.matrix as cr_matrix import cellranger.io as", "in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP])", "h5_constants.MIN_MEM_GB}]} chunks = [] # FIXME: Add one for reasons unknown matrix_mem_gb =", "1}} def main(args, outs): if args.skip: return matrix = cr_matrix.CountMatrix.load_h5_file(args.matrix_h5) # For now,", "\"stages/analyzer/run_differential_expression\", ) split using ( in string clustering_key, ) \"\"\" def split(args): if", "cr_matrix import cellranger.io as cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO", "cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s", "from cellranger.analysis.singlegenome import SingleGenomeAnalysis import cellranger.h5_constants as h5_constants import cellranger.analysis.constants as analysis_constants import", "string clustering_key, ) \"\"\" def split(args): if args.skip: return {'chunks': [{'__mem_gb': h5_constants.MIN_MEM_GB}]} chunks", "with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def join(args,", "for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5,", "only compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = 
SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key)", "return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out", "bool skip, in int random_seed, in int max_clusters, out h5 diffexp_h5, out path", "FIXME: Add one for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb =", "threads in order to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads", "to avoid overloading a node threads = min(cr_io.get_thread_request_from_mem_gb(chunk_mem_gb), NUM_THREADS_MIN) threads = 4 for", "expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters)", "features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with", "chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in chunk_outs] chunk_csv_dirs", "jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5 clustering_h5, in", "'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb' :", "for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp =", "10X Genomics, Inc. All rights reserved. # import cellranger.analysis.diffexp as cr_diffexp import cellranger.analysis.io", "[chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir in chunk_csv_dirs:", "h5 matrix_h5, in h5 clustering_h5, in bool skip, in int random_seed, in int", "on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in h5", "in bool skip, in int random_seed, in int max_clusters, out h5 diffexp_h5, out", "cr_io import cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why this", "join(args, outs, chunk_defs, chunk_outs): if args.skip: return chunk_h5s = [chunk_out.diffexp_h5 for chunk_out in", "chunk_csv_dirs = [chunk_out.diffexp_csv for chunk_out in chunk_outs] analysis_io.combine_h5_files(chunk_h5s, outs.diffexp_h5, [analysis_constants.ANALYSIS_H5_DIFFERENTIAL_EXPRESSION_GROUP, analysis_constants.ANALYSIS_H5_KMEANS_DIFFERENTIAL_EXPRESSION_GROUP]) for csv_dir", "chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb' : 1}} def main(args,", "Martian thinks it does and kills it on long jobs __MRO__ = \"\"\"", "chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb'", "stage takes > 1 thread. 
Martian thinks it does and kills it on", "int random_seed, in int max_clusters, out h5 diffexp_h5, out path diffexp_csv, src py", "cellranger.library_constants as lib_constants NUM_THREADS_MIN = 4 #TODO Not clear why this stage takes", "it on long jobs __MRO__ = \"\"\" stage RUN_DIFFERENTIAL_EXPRESSION( in h5 matrix_h5, in", "thinks it does and kills it on long jobs __MRO__ = \"\"\" stage", "this stage takes > 1 thread. Martian thinks it does and kills it", "key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks': chunks, 'join': {'__mem_gb' : 1}}", "now, only compute for gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5,", "matrix_h5, in h5 clustering_h5, in bool skip, in int random_seed, in int max_clusters,", "key in SingleGenomeAnalysis.load_clustering_keys_from_h5(args.clustering_h5): chunks.append({ 'clustering_key': key, '__mem_gb': chunk_mem_gb, '__threads': threads, }) return {'chunks':", "args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix, clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key,", "gene expression features matrix = matrix.select_features_by_type(lib_constants.GENE_EXPRESSION_LIBRARY_TYPE) clustering = SingleGenomeAnalysis.load_clustering_from_h5(args.clustering_h5, args.clustering_key) diffexp = cr_diffexp.run_differential_expression(matrix,", "clustering.clusters) with analysis_io.open_h5_for_writing(outs.diffexp_h5) as f: cr_diffexp.save_differential_expression_h5(f, args.clustering_key, diffexp) cr_diffexp.save_differential_expression_csv(args.clustering_key, diffexp, matrix, outs.diffexp_csv) def", "Add one for reasons unknown matrix_mem_gb = 1.8 * cr_matrix.CountMatrix.get_mem_gb_from_matrix_h5(args.matrix_h5) chunk_mem_gb = int(max(matrix_mem_gb," ]
[ "not hashable and cannot be put into a set. Examples ======== >>> from", "a certificate for the graph gr adjacency list The graph is assumed to", ">>> _verify_normal_closure(S, A, closure=A) True See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.normal_closure \"\"\" if closure is", ">>> b = Permutation([1, 2, 0, 4, 3]) >>> c = Permutation([3, 4,", "from sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup) >>> from sympy.combinatorics.testutil import _verify_normal_closure >>> S", "= current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1: return False return True def _verify_centralizer(group, arg,", "from sympy.combinatorics.permutations import _af_commutes_with if hasattr(other, 'generators'): elements = list(self.generate_dimino(af=True)) gens = [x._array_form", "= _distribute_gens_by_base(base, gens) current_stabilizer = group for i in range(len(base)): candidate = PermutationGroup(strong_gens_distr[i])", "2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]} >>> c1 =", "two component tensors of type `i` None no symmetry 0 commuting 1 anticommuting", "else return the array form of the permutation representing the canonical form of", "candidate = PermutationGroup(strong_gens_distr[i]) if current_stabilizer.order() != candidate.order(): return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if", "This is used to test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure Examples ======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup,", ">>> from sympy.combinatorics.named_groups import AlternatingGroup >>> from sympy.combinatorics.testutil import _verify_bsgs >>> A =", "gens_products, dummy_sgs from sympy.combinatorics.permutations import Permutation, _af_rmul v1 = [] for i in", "for tensors of type `i` base_i, gens_i BSGS for tensors of this type", "of the graph. The canonical form of the tensor gives a certificate for", "This is a brute force implementation that goes over all elements of the", "a naive implementation using the definition of a base and a strong generating", "size-2) if isinstance(sym, int): num_types = 1 dummies = [dummies] sym = [sym]", "for testing purposes. Since the array form of a permutation is currently a", "of elements for the centralizer of a subgroup/set/element. This is a brute force", "2, 0, 4, 5]) >>> base2, gens2 = get_symmetric_group_sgs(2) >>> canonicalize_naive(g, [2, 3],", "{tuple(a) for a in first} == \\ {tuple(a) for a in second} def", "2, 1, 3, 4, 5] \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup from sympy.combinatorics.tensor_can import", "2, 3, 5], 5:[0, 4]} >>> gr2 = {0:[1, 5], 1:[0, 2, 3,", "return _naive_list_centralizer(self, PermutationGroup([other]), af) def _verify_bsgs(group, base, gens): \"\"\" Verify the correctness of", "= tuple(_af_rmul(d, h)) st.add(q) a = list(st) a.sort() prev = (0,)*size for h", "[0]*(len(vertices[0])+1) for neigh in vertices: vlen[len(neigh)] += 1 v = [] for i", "it. There are other procedures for verifying a base and strong generating set,", "3, 5], 5:[0, 4]} >>> gr2 = {0:[1, 5], 1:[0, 2, 3, 4],", "contracted when they correspond to the same line of the graph. 
The canonical", "0)) v.reverse() dummies = list(range(num_indices)) can = canonicalize(g, dummies, 0, *v) return can", "= set() for s in S.generate(af=True): h = _af_rmul(g, s) for d in", "using the definition of a base and a strong generating set relative to", "symmetry 0 commuting 1 anticommuting Return 0 if the tensor is zero, else", "_verify_normal_closure >>> S = SymmetricGroup(3) >>> A = AlternatingGroup(3) >>> _verify_normal_closure(S, A, closure=A)", "if h[-1] != prev[-1]: return 0 prev = h return list(a[0]) def graph_certificate(gr):", "Since the array form of a permutation is currently a list, Permutation is", "True \"\"\" return {tuple(a) for a in first} == \\ {tuple(a) for a", "sorted(g) == list(range(size)) g = Permutation(g) vlen = [0]*(len(vertices[0])+1) for neigh in vertices:", "is a brute force implementation that goes over all elements of the group", "commutes_with_gens(element): centralizer_list.append(element) return centralizer_list elif hasattr(other, 'getitem'): return _naive_list_centralizer(self, PermutationGroup(other), af) elif hasattr(other,", "\\ {tuple(a) for a in second} def _naive_list_centralizer(self, other, af=False): from sympy.combinatorics.perm_groups import", "return 0 prev = h return list(a[0]) def graph_certificate(gr): \"\"\" Return a certificate", "dummies list of dummy indices msym symmetry of the metric v is a", "over all elements of the group and checks for membership in the centralizer.", ">>> gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3,", "sympy.combinatorics.permutations import _af_commutes_with if hasattr(other, 'generators'): elements = list(self.generate_dimino(af=True)) gens = [x._array_form for", "8, 10, 12, 3, 14, 16, 18, 5, 9, 15, 7, 11, 17,", "5:[0, 4]} >>> gr2 = {0:[1, 5], 1:[0, 2, 3, 4], 2:[1, 3,", "[a, b, c] >>> ls2 = [b, c, a] >>> _cmp_perm_lists(ls1, ls2) True", "!= candidate.order(): return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1: return False", "for a in second} def _naive_list_centralizer(self, other, af=False): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\"", "return the array form of the permutation representing the canonical form of the", "v is a list of (base_i, gens_i, n_i, sym_i) for tensors of type", "number of lines of the graph num_indices = 0 for v, neigh in", "i in range(len(vlen)): n = vlen[i] if n: base, gens = get_symmetric_group_sgs(i) v.append((base,", "items] i = 0 for v, neigh in items: for v2 in neigh:", "prev[:-2]: if h[-1] != prev[-1]: return 0 prev = h return list(a[0]) def", "# the odd index to the other vertex vertices = [[] for i", "_af_commutes_with if hasattr(other, 'generators'): elements = list(self.generate_dimino(af=True)) gens = [x._array_form for x in", "is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples ======== >>> from sympy.combinatorics.named_groups import", "def _naive_list_centralizer(self, other, af=False): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Return a list of", "centr_list = list(centr.generate_dimino(af=True)) centr_list_naive = _naive_list_centralizer(group, arg, af=True) return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group,", "g = [] for v in vertices: g.extend(v) assert len(g) == num_indices g", "elements: if commutes_with_gens(element): centralizer_list.append(element) return centralizer_list elif hasattr(other, 'getitem'): return _naive_list_centralizer(self, PermutationGroup(other), af)", 
"tensors of type `i` None no symmetry 0 commuting 1 anticommuting Return 0", "= PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym, *v): \"\"\" Canonicalize tensor formed", "from sympy.combinatorics.tensor_can import gens_products, dummy_sgs from sympy.combinatorics.permutations import Permutation, _af_rmul v1 = []", "3, 4]} >>> c1 = graph_certificate(gr1) >>> c2 = graph_certificate(gr2) >>> c1 [0,", "``sympy.combinatorics.perm_groups``. Examples ======== >>> from sympy.combinatorics.testutil import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import DihedralGroup", "= [] if not af: for element in elements: if commutes_with_gens(element): centralizer_list.append(Permutation._af_new(element)) else:", "sympy.combinatorics.testutil import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>> D = DihedralGroup(4) >>>", "True See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.normal_closure \"\"\" if closure is None: closure = group.normal_closure(arg)", "graph_certificate(gr2) >>> c1 [0, 2, 4, 6, 1, 8, 10, 12, 3, 14,", "2 assert sorted(g) == list(range(size)) g = Permutation(g) vlen = [0]*(len(vertices[0])+1) for neigh", "sympy.combinatorics.perm_groups.PermutationGroup.normal_closure \"\"\" if closure is None: closure = group.normal_closure(arg) conjugates = set() if", "and a strong generating set relative to it. There are other procedures for", "tensor is zero, else return the array form of the permutation representing the", "list of (base_i, gens_i, n_i, sym_i) for tensors of type `i` base_i, gens_i", "[dummies] sym = [sym] else: num_types = len(sym) dgens = [] for i", "ot tensors of type `i` sym_i symmetry under exchange of two component tensors", "g = g.array_form st = set() for s in S.generate(af=True): h = _af_rmul(g,", "closure of a subgroup/subset/element in a group. This is used to test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure", "_distribute_gens_by_base(base, gens) current_stabilizer = group for i in range(len(base)): candidate = PermutationGroup(strong_gens_distr[i]) if", "sympy.combinatorics.tensor_can import get_symmetric_group_sgs >>> from sympy.combinatorics import Permutation, PermutationGroup >>> g = Permutation([1,", "centr_list_naive = _naive_list_centralizer(group, arg, af=True) return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group, arg, closure=None): from", "base2, gens2 = get_symmetric_group_sgs(2) >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0))", "0 for v, neigh in items: num_indices += len(neigh) # associate to each", "from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify the normal closure of a subgroup/subset/element in", "Return 0 if the tensor is zero, else return the array form of", "h)) st.add(q) a = list(st) a.sort() prev = (0,)*size for h in a:", "- 2)) S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x in dgens]) dlist", "sym[i], size - 2)) S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x in", "gens_i, n_i, sym_i) for tensors of type `i` base_i, gens_i BSGS for tensors", "of the tensor. 
Examples ======== >>> from sympy.combinatorics.testutil import canonicalize_naive >>> from sympy.combinatorics.tensor_can", "c2 True \"\"\" from sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items", "arg, centr=None): \"\"\" Verify the centralizer of a group/set/element inside another group. This", "the group and checks for membership in the centralizer. It is used to", "PermutationGroup \"\"\" Verify the normal closure of a subgroup/subset/element in a group. This", "3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0, 4]} >>> gr2 = {0:[1,", "a group/set/element inside another group. This is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups``", "'array_form'): subgr_gens = [arg] for el in group.generate_dimino(): for gen in subgr_gens: conjugates.add(gen", "======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup) >>> from sympy.combinatorics.testutil import _verify_normal_closure", "correspond to the same line of the graph. The canonical form of the", "which comes first in items, # the odd index to the other vertex", "for a in first} == \\ {tuple(a) for a in second} def _naive_list_centralizer(self,", "... AlternatingGroup) >>> from sympy.combinatorics.perm_groups import PermutationGroup >>> from sympy.combinatorics.permutations import Permutation >>>", "symmetry under exchange of two component tensors of type `i` None no symmetry", "None no symmetry 0 commuting 1 anticommuting Return 0 if the tensor is", "naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym, *v): \"\"\" Canonicalize tensor", "False current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1: return False return True def", "else: num_types = len(sym) dgens = [] for i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i],", "1, 2, 3, 4])]) >>> _verify_centralizer(S, A, centr) True See Also ======== _naive_list_centralizer,", "= graph_certificate(gr1) >>> c2 = graph_certificate(gr2) >>> c1 [0, 2, 4, 6, 1,", "procedures for verifying a base and strong generating set, but this one will", "centr) True See Also ======== _naive_list_centralizer, sympy.combinatorics.perm_groups.PermutationGroup.centralizer, _cmp_perm_lists \"\"\" if centr is None:", "a list, Permutation is not hashable and cannot be put into a set.", "1 v = [] for i in range(len(vlen)): n = vlen[i] if n:", "prev = h return list(a[0]) def graph_certificate(gr): \"\"\" Return a certificate for the", "3, 5], 5:[0, 2, 3, 4]} >>> c1 = graph_certificate(gr1) >>> c2 =", "of (base_i, gens_i, n_i, sym_i) for tensors of type `i` base_i, gens_i BSGS", "num_types = len(sym) dgens = [] for i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size", "_naive_list_centralizer(self, other, af=False): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Return a list of elements", "list(D.generate(af=True)) g = g.array_form st = set() for s in S.generate(af=True): h =", "4]} >>> c1 = graph_certificate(gr1) >>> c2 = graph_certificate(gr2) >>> c1 [0, 2,", "the graph num_indices = 0 for v, neigh in items: num_indices += len(neigh)", "2, 4], 4:[1, 2, 3, 5], 5:[0, 4]} >>> gr2 = {0:[1, 5],", "_naive_list_centralizer(self, PermutationGroup(other), af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]), af) def _verify_bsgs(group, base,", "= v[i] 
v1.append((base_i, gens_i, [[]]*n_i, sym_i)) size, sbase, sgens = gens_products(*v1) dgens =", "centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])]) >>> _verify_centralizer(S, A, centr) True See", "each vertex its indices; for each line # between two vertices assign the", "return True def _verify_centralizer(group, arg, centr=None): \"\"\" Verify the centralizer of a group/set/element", "n_i, sym_i) for tensors of type `i` base_i, gens_i BSGS for tensors of", "= SymmetricGroup(5) >>> A = AlternatingGroup(5) >>> centr = PermutationGroup([Permutation([0, 1, 2, 3,", "brute force implementation that goes over all elements of the group and checks", "used to test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``. Examples ======== >>> from sympy.combinatorics.testutil import _naive_list_centralizer", "import _af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items = list(gr.items()) items.sort(key=lambda x: len(x[1]),", "5] \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup from sympy.combinatorics.tensor_can import gens_products, dummy_sgs from sympy.combinatorics.permutations", "<reponame>ethankward/sympy<filename>sympy/combinatorics/testutil.py from sympy.combinatorics import Permutation from sympy.combinatorics.util import _distribute_gens_by_base rmul = Permutation.rmul def", "the array form of the permutation representing the canonical form of the tensor.", "a graph. Examples ======== >>> from sympy.combinatorics.testutil import graph_certificate >>> gr1 = {0:[1,", "of a base and a strong generating set relative to it. There are", "for tensors of this type n_i number ot tensors of type `i` sym_i", "the centralizer. It is used to test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``. Examples ======== >>>", "gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3, 4],", "if centr is None: centr = group.centralizer(arg) centr_list = list(centr.generate_dimino(af=True)) centr_list_naive = _naive_list_centralizer(group,", "def _verify_normal_closure(group, arg, closure=None): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify the normal closure", "of the graph num_indices = 0 for v, neigh in items: num_indices +=", "generating set, but this one will serve for more robust testing. Examples ========", "used to test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure Examples ======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup)", "st = set() for s in S.generate(af=True): h = _af_rmul(g, s) for d", "sym_i symmetry under exchange of two component tensors of type `i` None no", "in items: num_indices += len(neigh) # associate to each vertex its indices; for", "in items, # the odd index to the other vertex vertices = [[]", "the centralizer of a group/set/element inside another group. This is used for testing", "n_i number ot tensors of type `i` sym_i symmetry under exchange of two", "< pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1) i += 2 g = [] for v in", "= h return list(a[0]) def graph_certificate(gr): \"\"\" Return a certificate for the graph", "lambda x: all(_af_commutes_with(x, gen) for gen in gens) centralizer_list = [] if not", "indices msym symmetry of the metric v is a list of (base_i, gens_i,", "dummies = [dummies] sym = [sym] else: num_types = len(sym) dgens = []", "algorithm to get the certificate of a graph. 
Examples ======== >>> from sympy.combinatorics.testutil", "def graph_certificate(gr): \"\"\" Return a certificate for the graph gr adjacency list The", "if pvert[v] < pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1) i += 2 g = [] for", "gens) current_stabilizer = group for i in range(len(base)): candidate = PermutationGroup(strong_gens_distr[i]) if current_stabilizer.order()", "v, neigh in items: num_indices += len(neigh) # associate to each vertex its", "x in other.generators] commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for gen in gens)", "centr=None): \"\"\" Verify the centralizer of a group/set/element inside another group. This is", "to get the certificate of a graph. Examples ======== >>> from sympy.combinatorics.testutil import", "# associate to each vertex its indices; for each line # between two", "4, 5]) >>> base2, gens2 = get_symmetric_group_sgs(2) >>> canonicalize_naive(g, [2, 3], 0, (base2,", "the normal closure of a subgroup/subset/element in a group. This is used to", "return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group, arg, closure=None): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Verify", "PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym, *v): \"\"\" Canonicalize tensor formed by", "items: num_indices += len(neigh) # associate to each vertex its indices; for each", "1:[0, 2, 4], 2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3,", "component tensors of type `i` None no symmetry 0 commuting 1 anticommuting Return", "for s in S.generate(af=True): h = _af_rmul(g, s) for d in dlist: q", "4, 3]) >>> c = Permutation([3, 4, 0, 1, 2]) >>> ls1 =", "_af_rmul v1 = [] for i in range(len(v)): base_i, gens_i, n_i, sym_i =", "for each line # between two vertices assign the # even index to", "= Permutation.rmul def _cmp_perm_lists(first, second): \"\"\" Compare two lists of permutations as sets.", "currently a list, Permutation is not hashable and cannot be put into a", "4, 0, 1, 2]) >>> ls1 = [a, b, c] >>> ls2 =", "AlternatingGroup(4) >>> A.schreier_sims() >>> _verify_bsgs(A, A.base, A.strong_gens) True See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims \"\"\"", "Canonicalize tensor formed by tensors of the different types g permutation representing the", "sympy.combinatorics.perm_groups import PermutationGroup strong_gens_distr = _distribute_gens_by_base(base, gens) current_stabilizer = group for i in", "group. This is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples ======== >>> from", "a subgroup/subset/element in a group. 
This is used to test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure Examples ========", "0 if the tensor is zero, else return the array form of the", "from sympy.combinatorics.perm_groups import PermutationGroup strong_gens_distr = _distribute_gens_by_base(base, gens) current_stabilizer = group for i", "num_indices = 0 for v, neigh in items: num_indices += len(neigh) # associate", "sympy.combinatorics.perm_groups.PermutationGroup.centralizer, _cmp_perm_lists \"\"\" if centr is None: centr = group.centralizer(arg) centr_list = list(centr.generate_dimino(af=True))", "hasattr(arg, 'array_form'): subgr_gens = [arg] for el in group.generate_dimino(): for gen in subgr_gens:", "`i` None no symmetry 0 commuting 1 anticommuting Return 0 if the tensor", "isinstance(sym, int): num_types = 1 dummies = [dummies] sym = [sym] else: num_types", "S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x in dgens]) dlist = list(D.generate(af=True))", "v in vertices: g.extend(v) assert len(g) == num_indices g += [num_indices, num_indices +", "graph a symmetric tensor with number of indices equal to the degree of", "a] >>> _cmp_perm_lists(ls1, ls2) True \"\"\" return {tuple(a) for a in first} ==", "AlternatingGroup >>> from sympy.combinatorics.testutil import _verify_bsgs >>> A = AlternatingGroup(4) >>> A.schreier_sims() >>>", "between two vertices assign the # even index to the vertex which comes", "sym, *v): \"\"\" Canonicalize tensor formed by tensors of the different types g", "tensor formed by tensors of the different types g permutation representing the tensor", "import AlternatingGroup >>> from sympy.combinatorics.testutil import _verify_bsgs >>> A = AlternatingGroup(4) >>> A.schreier_sims()", "gen in subgr_gens: conjugates.add(gen ^ el) naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g,", "a certificate for the graph. This is not an efficient algorithm to get", "3, 0, 1])] See Also ======== sympy.combinatorics.perm_groups.centralizer \"\"\" from sympy.combinatorics.permutations import _af_commutes_with if", "from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S = SymmetricGroup(5)", "graph. The canonical form of the tensor gives a certificate for the graph.", "True See Also ======== _naive_list_centralizer, sympy.combinatorics.perm_groups.PermutationGroup.centralizer, _cmp_perm_lists \"\"\" if centr is None: centr", "3, 5], 3:[1, 2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]}", "Permutation, _af_rmul v1 = [] for i in range(len(v)): base_i, gens_i, n_i, sym_i", "to the other vertex vertices = [[] for i in items] i =", "are contracted when they correspond to the same line of the graph. The", "and strong generating set. This is a naive implementation using the definition of", "b = Permutation([1, 2, 0, 4, 3]) >>> c = Permutation([3, 4, 0,", "is currently a list, Permutation is not hashable and cannot be put into", "sympy.combinatorics.named_groups import AlternatingGroup >>> from sympy.combinatorics.testutil import _verify_bsgs >>> A = AlternatingGroup(4) >>>", "PermutationGroup(strong_gens_distr[i]) if current_stabilizer.order() != candidate.order(): return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() !=", "g.array_form st = set() for s in S.generate(af=True): h = _af_rmul(g, s) for", "checks for membership in the centralizer. 
It is used to test ``.centralizer()`` from", "======== >>> from sympy.combinatorics.testutil import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>> D", "A = AlternatingGroup(5) >>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])]) >>> _verify_centralizer(S,", "======== >>> from sympy.combinatorics.testutil import graph_certificate >>> gr1 = {0:[1, 2, 3, 5],", "S.generate(af=True): h = _af_rmul(g, s) for d in dlist: q = tuple(_af_rmul(d, h))", "items, # the odd index to the other vertex vertices = [[] for", "inside another group. This is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples ========", "This is not an efficient algorithm to get the certificate of a graph.", "number ot tensors of type `i` sym_i symmetry under exchange of two component", "testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples ======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup)", "= SymmetricGroup(3) >>> A = AlternatingGroup(3) >>> _verify_normal_closure(S, A, closure=A) True See Also", "import canonicalize_naive >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs >>> from sympy.combinatorics import Permutation, PermutationGroup", "size - 2)) S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x in dgens])", "assign the # even index to the vertex which comes first in items,", "+= [num_indices, num_indices + 1] size = num_indices + 2 assert sorted(g) ==", "as sets. This is used for testing purposes. Since the array form of", "= [a, b, c] >>> ls2 = [b, c, a] >>> _cmp_perm_lists(ls1, ls2)", "BSGS for tensors of this type n_i number ot tensors of type `i`", "def _cmp_perm_lists(first, second): \"\"\" Compare two lists of permutations as sets. This is", "dlist: q = tuple(_af_rmul(d, h)) st.add(q) a = list(st) a.sort() prev = (0,)*size", "of a base and strong generating set. This is a naive implementation using", "i in range(len(base)): candidate = PermutationGroup(strong_gens_distr[i]) if current_stabilizer.order() != candidate.order(): return False current_stabilizer", "_cmp_perm_lists(first, second): \"\"\" Compare two lists of permutations as sets. This is used", "size, sbase, sgens = gens_products(*v1) dgens = dummy_sgs(dummies, sym, size-2) if isinstance(sym, int):", "the metric v is a list of (base_i, gens_i, n_i, sym_i) for tensors", "conjugates = set() if hasattr(arg, 'generators'): subgr_gens = arg.generators elif hasattr(arg, '__getitem__'): subgr_gens", "len(g) == num_indices g += [num_indices, num_indices + 1] size = num_indices +", "A = AlternatingGroup(4) >>> A.schreier_sims() >>> _verify_bsgs(A, A.base, A.strong_gens) True See Also ========", "el) naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym, *v): \"\"\" Canonicalize", "True \"\"\" from sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items =", "= PermutationGroup([Permutation(x) for x in dgens]) dlist = list(D.generate(af=True)) g = g.array_form st", "in gens) centralizer_list = [] if not af: for element in elements: if", "be unoriented and without external lines. 
Associate to each vertex of the graph", "12, 3, 14, 16, 18, 5, 9, 15, 7, 11, 17, 13, 19,", "line # between two vertices assign the # even index to the vertex", "in elements: if commutes_with_gens(element): centralizer_list.append(Permutation._af_new(element)) else: for element in elements: if commutes_with_gens(element): centralizer_list.append(element)", "return _naive_list_centralizer(self, PermutationGroup(other), af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]), af) def _verify_bsgs(group,", "in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2)) S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x)", "5], 5:[0, 2, 3, 4]} >>> c1 = graph_certificate(gr1) >>> c2 = graph_certificate(gr2)", "This is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples ======== >>> from sympy.combinatorics.named_groups", "the certificate of a graph. Examples ======== >>> from sympy.combinatorics.testutil import graph_certificate >>>", "two vertices assign the # even index to the vertex which comes first", "import _verify_centralizer >>> S = SymmetricGroup(5) >>> A = AlternatingGroup(5) >>> centr =", "_verify_bsgs(A, A.base, A.strong_gens) True See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup", "group/set/element inside another group. This is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples", "(base_i, gens_i, n_i, sym_i) for tensors of type `i` base_i, gens_i BSGS for", "representing the canonical form of the tensor. Examples ======== >>> from sympy.combinatorics.testutil import", "= [] for i in range(len(v)): base_i, gens_i, n_i, sym_i = v[i] v1.append((base_i,", "4:[1, 2, 3, 5], 5:[0, 4]} >>> gr2 = {0:[1, 5], 1:[0, 2,", "= list(gr.items()) items.sort(key=lambda x: len(x[1]), reverse=True) pvert = [x[0] for x in items]", "vertex; indices are contracted when they correspond to the same line of the", "dummy_sgs(dummies, sym, size-2) if isinstance(sym, int): num_types = 1 dummies = [dummies] sym", "5, 9, 15, 7, 11, 17, 13, 19, 20, 21] >>> c1 ==", "exchange of two component tensors of type `i` None no symmetry 0 commuting", "canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0)) [0, 2, 1, 3, 4,", "4:[1, 3, 5], 5:[0, 2, 3, 4]} >>> c1 = graph_certificate(gr1) >>> c2", "g += [num_indices, num_indices + 1] size = num_indices + 2 assert sorted(g)", "_naive_list_centralizer(group, arg, af=True) return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group, arg, closure=None): from sympy.combinatorics.perm_groups import", "D = DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0, 1, 2, 3]), Permutation([2, 3, 0,", "else: for element in elements: if commutes_with_gens(element): centralizer_list.append(element) return centralizer_list elif hasattr(other, 'getitem'):", "for i in range(len(base)): candidate = PermutationGroup(strong_gens_distr[i]) if current_stabilizer.order() != candidate.order(): return False", "the vertex which comes first in items, # the odd index to the", "AlternatingGroup(5) >>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])]) >>> _verify_centralizer(S, A, centr)", "af=False): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Return a list of elements for the", "elif hasattr(arg, 'array_form'): subgr_gens = [arg] for el in group.generate_dimino(): for gen in", 
"get_symmetric_group_sgs(2) >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0)) [0, 2, 1,", "of a permutation is currently a list, Permutation is not hashable and cannot", ">>> _verify_centralizer(S, A, centr) True See Also ======== _naive_list_centralizer, sympy.combinatorics.perm_groups.PermutationGroup.centralizer, _cmp_perm_lists \"\"\" if", "import _af_commutes_with if hasattr(other, 'generators'): elements = list(self.generate_dimino(af=True)) gens = [x._array_form for x", "if commutes_with_gens(element): centralizer_list.append(element) return centralizer_list elif hasattr(other, 'getitem'): return _naive_list_centralizer(self, PermutationGroup(other), af) elif", "[] if not af: for element in elements: if commutes_with_gens(element): centralizer_list.append(Permutation._af_new(element)) else: for", "for more robust testing. Examples ======== >>> from sympy.combinatorics.named_groups import AlternatingGroup >>> from", "1 anticommuting Return 0 if the tensor is zero, else return the array", "sympy.combinatorics.tensor_can import gens_products, dummy_sgs from sympy.combinatorics.permutations import Permutation, _af_rmul v1 = [] for", "Permutation >>> from sympy.combinatorics.testutil import _cmp_perm_lists >>> a = Permutation([0, 2, 3, 4,", "def _verify_centralizer(group, arg, centr=None): \"\"\" Verify the centralizer of a group/set/element inside another", "for membership in the centralizer. It is used to test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``.", "type `i` None no symmetry 0 commuting 1 anticommuting Return 0 if the", "1]) >>> b = Permutation([1, 2, 0, 4, 3]) >>> c = Permutation([3,", "naive implementation using the definition of a base and a strong generating set", "for d in dlist: q = tuple(_af_rmul(d, h)) st.add(q) a = list(st) a.sort()", "indices are contracted when they correspond to the same line of the graph.", "9, 15, 7, 11, 17, 13, 19, 20, 21] >>> c1 == c2", "af=True) return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group, arg, closure=None): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\"", "of the metric v is a list of (base_i, gens_i, n_i, sym_i) for", "dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2)) S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x", "4], 2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0,", "num_indices += len(neigh) # associate to each vertex its indices; for each line", "commutes_with_gens(element): centralizer_list.append(Permutation._af_new(element)) else: for element in elements: if commutes_with_gens(element): centralizer_list.append(element) return centralizer_list elif", "not af: for element in elements: if commutes_with_gens(element): centralizer_list.append(Permutation._af_new(element)) else: for element in", "sympy.combinatorics.perm_groups.centralizer \"\"\" from sympy.combinatorics.permutations import _af_commutes_with if hasattr(other, 'generators'): elements = list(self.generate_dimino(af=True)) gens", "put into a set. Examples ======== >>> from sympy.combinatorics.permutations import Permutation >>> from", ">>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])]) >>> _verify_centralizer(S, A, centr) True", "= Permutation([0, 2, 3, 4, 1]) >>> b = Permutation([1, 2, 0, 4,", "2)) S = PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x in dgens]) dlist =", "without external lines. 
Associate to each vertex of the graph a symmetric tensor", "vertices[pvert[v2]].append(i+1) i += 2 g = [] for v in vertices: g.extend(v) assert", "the definition of a base and a strong generating set relative to it.", ">>> _naive_list_centralizer(D, D) [Permutation([0, 1, 2, 3]), Permutation([2, 3, 0, 1])] See Also", "elif hasattr(arg, '__getitem__'): subgr_gens = arg elif hasattr(arg, 'array_form'): subgr_gens = [arg] for", "no symmetry 0 commuting 1 anticommuting Return 0 if the tensor is zero,", "gens_i, [[]]*n_i, sym_i)) size, sbase, sgens = gens_products(*v1) dgens = dummy_sgs(dummies, sym, size-2)", "line of the graph. The canonical form of the tensor gives a certificate", "neigh in items: num_indices += len(neigh) # associate to each vertex its indices;", "_verify_bsgs(group, base, gens): \"\"\" Verify the correctness of a base and strong generating", "= _af_rmul(g, s) for d in dlist: q = tuple(_af_rmul(d, h)) st.add(q) a", "items = list(gr.items()) items.sort(key=lambda x: len(x[1]), reverse=True) pvert = [x[0] for x in", "Examples ======== >>> from sympy.combinatorics.testutil import graph_certificate >>> gr1 = {0:[1, 2, 3,", "0)) [0, 2, 1, 3, 4, 5] \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup from", "strong generating set. This is a naive implementation using the definition of a", "from sympy.combinatorics.perm_groups import PermutationGroup >>> from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import", "i in items] i = 0 for v, neigh in items: for v2", "pvert[v] < pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1) i += 2 g = [] for v", "for the graph gr adjacency list The graph is assumed to be unoriented", "certificate for the graph gr adjacency list The graph is assumed to be", "from sympy.combinatorics.testutil import graph_certificate >>> gr1 = {0:[1, 2, 3, 5], 1:[0, 2,", "tensor gives a certificate for the graph. This is not an efficient algorithm", "0 for v, neigh in items: for v2 in neigh: if pvert[v] <", "len(sym) dgens = [] for i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2))", "relative to it. 
There are other procedures for verifying a base and strong", "range(len(vlen)): n = vlen[i] if n: base, gens = get_symmetric_group_sgs(i) v.append((base, gens, n,", "if current_stabilizer.order() != candidate.order(): return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1:", "gens = get_symmetric_group_sgs(i) v.append((base, gens, n, 0)) v.reverse() dummies = list(range(num_indices)) can =", "size = num_indices + 2 assert sorted(g) == list(range(size)) g = Permutation(g) vlen", "= list(D.generate(af=True)) g = g.array_form st = set() for s in S.generate(af=True): h", "for i in items] i = 0 for v, neigh in items: for", "_verify_bsgs >>> A = AlternatingGroup(4) >>> A.schreier_sims() >>> _verify_bsgs(A, A.base, A.strong_gens) True See", ">>> c1 = graph_certificate(gr1) >>> c2 = graph_certificate(gr2) >>> c1 [0, 2, 4,", "st.add(q) a = list(st) a.sort() prev = (0,)*size for h in a: if", "= g.array_form st = set() for s in S.generate(af=True): h = _af_rmul(g, s)", "closure is None: closure = group.normal_closure(arg) conjugates = set() if hasattr(arg, 'generators'): subgr_gens", "sympy.combinatorics.named_groups import DihedralGroup >>> D = DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0, 1, 2,", "= lambda x: all(_af_commutes_with(x, gen) for gen in gens) centralizer_list = [] if", "array form of the permutation representing the canonical form of the tensor. Examples", "PermutationGroup \"\"\" Return a list of elements for the centralizer of a subgroup/set/element.", "(base2, gens2, 2, 0)) [0, 2, 1, 3, 4, 5] \"\"\" from sympy.combinatorics.perm_groups", "from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Return a list of elements for the centralizer", "set() for s in S.generate(af=True): h = _af_rmul(g, s) for d in dlist:", "It is used to test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``. Examples ======== >>> from sympy.combinatorics.testutil", "sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup) >>> from sympy.combinatorics.testutil import _verify_normal_closure >>> S =", "PermutationGroup from sympy.combinatorics.tensor_can import gens_products, dummy_sgs from sympy.combinatorics.permutations import Permutation, _af_rmul v1 =", "+ 1] size = num_indices + 2 assert sorted(g) == list(range(size)) g =", "Permutation(g) vlen = [0]*(len(vertices[0])+1) for neigh in vertices: vlen[len(neigh)] += 1 v =", "def _verify_bsgs(group, base, gens): \"\"\" Verify the correctness of a base and strong", "num_indices g += [num_indices, num_indices + 1] size = num_indices + 2 assert", "return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym, *v): \"\"\" Canonicalize tensor formed by tensors", "sympy.combinatorics.testutil import _verify_centralizer >>> S = SymmetricGroup(5) >>> A = AlternatingGroup(5) >>> centr", ">>> from sympy.combinatorics.named_groups import DihedralGroup >>> D = DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0,", "_verify_normal_closure(S, A, closure=A) True See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.normal_closure \"\"\" if closure is None:", "from sympy.combinatorics.permutations import Permutation, _af_rmul v1 = [] for i in range(len(v)): base_i,", "tensors of the different types g permutation representing the tensor dummies list of", "indices; for each line # between two vertices assign the # even index", "\"\"\" Return a list of elements for the centralizer of a subgroup/set/element. 
This", "sympy.combinatorics import Permutation from sympy.combinatorics.util import _distribute_gens_by_base rmul = Permutation.rmul def _cmp_perm_lists(first, second):", "v1 = [] for i in range(len(v)): base_i, gens_i, n_i, sym_i = v[i]", "generating set relative to it. There are other procedures for verifying a base", "even index to the vertex which comes first in items, # the odd", "the same line of the graph. The canonical form of the tensor gives", "graph is assumed to be unoriented and without external lines. Associate to each", "base_i, gens_i BSGS for tensors of this type n_i number ot tensors of", "a subgroup/set/element. This is a brute force implementation that goes over all elements", "from sympy.combinatorics.testutil import _verify_bsgs >>> A = AlternatingGroup(4) >>> A.schreier_sims() >>> _verify_bsgs(A, A.base,", "the different types g permutation representing the tensor dummies list of dummy indices", ">>> from sympy.combinatorics.testutil import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>> D =", "form of the tensor. Examples ======== >>> from sympy.combinatorics.testutil import canonicalize_naive >>> from", "for i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2)) S = PermutationGroup(sgens) D", "tensor dummies list of dummy indices msym symmetry of the metric v is", "list The graph is assumed to be unoriented and without external lines. Associate", "\"\"\" Compare two lists of permutations as sets. This is used for testing", ">>> D = DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0, 1, 2, 3]), Permutation([2, 3,", "af) def _verify_bsgs(group, base, gens): \"\"\" Verify the correctness of a base and", "the correctness of a base and strong generating set. This is a naive", "= set() if hasattr(arg, 'generators'): subgr_gens = arg.generators elif hasattr(arg, '__getitem__'): subgr_gens =", "import DihedralGroup >>> D = DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0, 1, 2, 3]),", "types g permutation representing the tensor dummies list of dummy indices msym symmetry", "graph gr adjacency list The graph is assumed to be unoriented and without", "sympy.combinatorics.testutil import graph_certificate >>> gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4],", "lines of the graph num_indices = 0 for v, neigh in items: num_indices", "PermutationGroup >>> from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S", "tensors of type `i` sym_i symmetry under exchange of two component tensors of", "arg, af=True) return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group, arg, closure=None): from sympy.combinatorics.perm_groups import PermutationGroup", "the # even index to the vertex which comes first in items, #", ">>> ls1 = [a, b, c] >>> ls2 = [b, c, a] >>>", "of lines of the graph num_indices = 0 for v, neigh in items:", "_naive_list_centralizer(self, PermutationGroup([other]), af) def _verify_bsgs(group, base, gens): \"\"\" Verify the correctness of a", "centr = group.centralizer(arg) centr_list = list(centr.generate_dimino(af=True)) centr_list_naive = _naive_list_centralizer(group, arg, af=True) return _cmp_perm_lists(centr_list,", "from ``sympy.combinatorics.perm_groups`` Examples ======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ... 
AlternatingGroup) >>> from", "{0:[1, 5], 1:[0, 2, 3, 4], 2:[1, 3, 5], 3:[1, 2, 4, 5],", "= [] for v in vertices: g.extend(v) assert len(g) == num_indices g +=", "generating set. This is a naive implementation using the definition of a base", "from sympy.combinatorics.tensor_can import get_symmetric_group_sgs >>> from sympy.combinatorics import Permutation, PermutationGroup >>> g =", "a base and strong generating set, but this one will serve for more", "from sympy.combinatorics import Permutation from sympy.combinatorics.util import _distribute_gens_by_base rmul = Permutation.rmul def _cmp_perm_lists(first,", "5], 1:[0, 2, 3, 4], 2:[1, 3, 5], 3:[1, 2, 4, 5], 4:[1,", "of the tensor are twice the number of lines of the graph num_indices", "closure = group.normal_closure(arg) conjugates = set() if hasattr(arg, 'generators'): subgr_gens = arg.generators elif", "of the graph a symmetric tensor with number of indices equal to the", "hasattr(other, 'getitem'): return _naive_list_centralizer(self, PermutationGroup(other), af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]), af)", "strong generating set relative to it. There are other procedures for verifying a", "vertices: g.extend(v) assert len(g) == num_indices g += [num_indices, num_indices + 1] size", "\"\"\" from sympy.combinatorics.perm_groups import PermutationGroup from sympy.combinatorics.tensor_can import gens_products, dummy_sgs from sympy.combinatorics.permutations import", "force implementation that goes over all elements of the group and checks for", "goes over all elements of the group and checks for membership in the", "range(len(v)): base_i, gens_i, n_i, sym_i = v[i] v1.append((base_i, gens_i, [[]]*n_i, sym_i)) size, sbase,", "n_i, sym_i = v[i] v1.append((base_i, gens_i, [[]]*n_i, sym_i)) size, sbase, sgens = gens_products(*v1)", "ls2 = [b, c, a] >>> _cmp_perm_lists(ls1, ls2) True \"\"\" return {tuple(a) for", "conjugates.add(gen ^ el) naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies, sym, *v):", "this type n_i number ot tensors of type `i` sym_i symmetry under exchange", "in a: if h[:-2] == prev[:-2]: if h[-1] != prev[-1]: return 0 prev", "sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items = list(gr.items()) items.sort(key=lambda x: len(x[1]), reverse=True) pvert =", "of a subgroup/set/element. This is a brute force implementation that goes over all", "(SymmetricGroup, ... AlternatingGroup) >>> from sympy.combinatorics.perm_groups import PermutationGroup >>> from sympy.combinatorics.permutations import Permutation", "in dlist: q = tuple(_af_rmul(d, h)) st.add(q) a = list(st) a.sort() prev =", "dgens = [] for i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2)) S", "======== sympy.combinatorics.perm_groups.PermutationGroup.normal_closure \"\"\" if closure is None: closure = group.normal_closure(arg) conjugates = set()", "4, 1]) >>> b = Permutation([1, 2, 0, 4, 3]) >>> c =", "return False current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1: return False return True", "centralizer_list.append(Permutation._af_new(element)) else: for element in elements: if commutes_with_gens(element): centralizer_list.append(element) return centralizer_list elif hasattr(other,", "purposes. 
Since the array form of a permutation is currently a list, Permutation", "canonicalize_naive >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs >>> from sympy.combinatorics import Permutation, PermutationGroup >>>", "canonical form of the tensor gives a certificate for the graph. This is", "7, 11, 17, 13, 19, 20, 21] >>> c1 == c2 True \"\"\"", "symmetric tensor with number of indices equal to the degree of the vertex;", "they correspond to the same line of the graph. The canonical form of", "2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0, 4]}", "pvert = [x[0] for x in items] pvert = _af_invert(pvert) # the indices", "comes first in items, # the odd index to the other vertex vertices", "n, 0)) v.reverse() dummies = list(range(num_indices)) can = canonicalize(g, dummies, 0, *v) return", "permutation is currently a list, Permutation is not hashable and cannot be put", "This is used for testing purposes. Since the array form of a permutation", "gens = [x._array_form for x in other.generators] commutes_with_gens = lambda x: all(_af_commutes_with(x, gen)", "element in elements: if commutes_with_gens(element): centralizer_list.append(Permutation._af_new(element)) else: for element in elements: if commutes_with_gens(element):", "another group. This is used for testing ``.centralizer()`` from ``sympy.combinatorics.perm_groups`` Examples ======== >>>", "gens_i BSGS for tensors of this type n_i number ot tensors of type", "= Permutation([3, 4, 0, 1, 2]) >>> ls1 = [a, b, c] >>>", "af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]), af) def _verify_bsgs(group, base, gens): \"\"\"", "this one will serve for more robust testing. Examples ======== >>> from sympy.combinatorics.named_groups", "to test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure Examples ======== >>> from sympy.combinatorics.named_groups import (SymmetricGroup, ... AlternatingGroup) >>>", "*v): \"\"\" Canonicalize tensor formed by tensors of the different types g permutation", "= gens_products(*v1) dgens = dummy_sgs(dummies, sym, size-2) if isinstance(sym, int): num_types = 1", "x in items] pvert = _af_invert(pvert) # the indices of the tensor are", "set. Examples ======== >>> from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _cmp_perm_lists", "a group. This is used to test sympy.combinatorics.perm_groups.PermutationGroup.normal_closure Examples ======== >>> from sympy.combinatorics.named_groups", "+= 1 v = [] for i in range(len(vlen)): n = vlen[i] if", "import Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S = SymmetricGroup(5) >>> A", "certificate for the graph. 
This is not an efficient algorithm to get the", "A = AlternatingGroup(3) >>> _verify_normal_closure(S, A, closure=A) True See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.normal_closure \"\"\"", "Permutation, PermutationGroup >>> g = Permutation([1, 3, 2, 0, 4, 5]) >>> base2,", "= DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0, 1, 2, 3]), Permutation([2, 3, 0, 1])]", "neigh: if pvert[v] < pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1) i += 2 g = []", "= vlen[i] if n: base, gens = get_symmetric_group_sgs(i) v.append((base, gens, n, 0)) v.reverse()", "This is a naive implementation using the definition of a base and a", ">>> _cmp_perm_lists(ls1, ls2) True \"\"\" return {tuple(a) for a in first} == \\", "the array form of a permutation is currently a list, Permutation is not", "for element in elements: if commutes_with_gens(element): centralizer_list.append(element) return centralizer_list elif hasattr(other, 'getitem'): return", "sympy.combinatorics.testutil import _cmp_perm_lists >>> a = Permutation([0, 2, 3, 4, 1]) >>> b", "sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup strong_gens_distr = _distribute_gens_by_base(base, gens) current_stabilizer = group", ">>> from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S =", "elements of the group and checks for membership in the centralizer. It is", "(SymmetricGroup, ... AlternatingGroup) >>> from sympy.combinatorics.testutil import _verify_normal_closure >>> S = SymmetricGroup(3) >>>", "gens2 = get_symmetric_group_sgs(2) >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0)) [0,", "sbase, sgens = gens_products(*v1) dgens = dummy_sgs(dummies, sym, size-2) if isinstance(sym, int): num_types", "0, 4, 5]) >>> base2, gens2 = get_symmetric_group_sgs(2) >>> canonicalize_naive(g, [2, 3], 0,", "4], 4:[1, 2, 3, 5], 5:[0, 4]} >>> gr2 = {0:[1, 5], 1:[0,", "20, 21] >>> c1 == c2 True \"\"\" from sympy.combinatorics.permutations import _af_invert from", "from sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items = list(gr.items()) items.sort(key=lambda", "`i` base_i, gens_i BSGS for tensors of this type n_i number ot tensors", "of type `i` base_i, gens_i BSGS for tensors of this type n_i number", "== prev[:-2]: if h[-1] != prev[-1]: return 0 prev = h return list(a[0])", "Also ======== sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup strong_gens_distr = _distribute_gens_by_base(base, gens) current_stabilizer", ">>> c1 == c2 True \"\"\" from sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can import", "1, 2]) >>> ls1 = [a, b, c] >>> ls2 = [b, c,", "[sym] else: num_types = len(sym) dgens = [] for i in range(num_types): dgens.extend(dummy_sgs(dummies[i],", "in range(len(v)): base_i, gens_i, n_i, sym_i = v[i] v1.append((base_i, gens_i, [[]]*n_i, sym_i)) size,", "h in a: if h[:-2] == prev[:-2]: if h[-1] != prev[-1]: return 0", "from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items = list(gr.items()) items.sort(key=lambda x: len(x[1]), reverse=True) pvert", "dummies, sym, *v): \"\"\" Canonicalize tensor formed by tensors of the different types", "Also ======== sympy.combinatorics.perm_groups.centralizer \"\"\" from 
from sympy.combinatorics import Permutation
from sympy.combinatorics.util import _distribute_gens_by_base

rmul = Permutation.rmul


def _cmp_perm_lists(first, second):
    """
    Compare two lists of permutations as sets.

    This is used for testing purposes. Since the array form of a
    permutation is currently a list, Permutation is not hashable
    and cannot be put into a set.

    Examples
    ========

    >>> from sympy.combinatorics.permutations import Permutation
    >>> from sympy.combinatorics.testutil import _cmp_perm_lists
    >>> a = Permutation([0, 2, 3, 4, 1])
    >>> b = Permutation([1, 2, 0, 4, 3])
    >>> c = Permutation([3, 4, 0, 1, 2])
    >>> ls1 = [a, b, c]
    >>> ls2 = [b, c, a]
    >>> _cmp_perm_lists(ls1, ls2)
    True

    """
    return {tuple(a) for a in first} == \
           {tuple(a) for a in second}
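
# A minimal usage sketch, not part of the original module: it shows that
# _cmp_perm_lists compares the two lists as sets, so ordering does not
# matter.  The helper name _demo_cmp_perm_lists is hypothetical, added
# here only for illustration.
def _demo_cmp_perm_lists():
    from sympy.combinatorics.permutations import Permutation
    p = Permutation([1, 0, 2])
    q = Permutation([2, 1, 0])
    assert _cmp_perm_lists([p, q], [q, p])      # same elements, different order
    assert not _cmp_perm_lists([p], [q])        # different underlying sets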
Since", "testing purposes. Since the array form of a permutation is currently a list,", "vlen[i] if n: base, gens = get_symmetric_group_sgs(i) v.append((base, gens, n, 0)) v.reverse() dummies", "+= len(neigh) # associate to each vertex its indices; for each line #", "= arg.generators elif hasattr(arg, '__getitem__'): subgr_gens = arg elif hasattr(arg, 'array_form'): subgr_gens =", "vlen = [0]*(len(vertices[0])+1) for neigh in vertices: vlen[len(neigh)] += 1 v = []", "1] size = num_indices + 2 assert sorted(g) == list(range(size)) g = Permutation(g)", "c1 = graph_certificate(gr1) >>> c2 = graph_certificate(gr2) >>> c1 [0, 2, 4, 6,", "g = Permutation(g) vlen = [0]*(len(vertices[0])+1) for neigh in vertices: vlen[len(neigh)] += 1", "for x in items] pvert = _af_invert(pvert) # the indices of the tensor", "h return list(a[0]) def graph_certificate(gr): \"\"\" Return a certificate for the graph gr", "type `i` sym_i symmetry under exchange of two component tensors of type `i`", "the other vertex vertices = [[] for i in items] i = 0", "from sympy.combinatorics.util import _distribute_gens_by_base rmul = Permutation.rmul def _cmp_perm_lists(first, second): \"\"\" Compare two", "all elements of the group and checks for membership in the centralizer. It", "a.sort() prev = (0,)*size for h in a: if h[:-2] == prev[:-2]: if", "int): num_types = 1 dummies = [dummies] sym = [sym] else: num_types =", "= [x._array_form for x in other.generators] commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for", "import (SymmetricGroup, ... AlternatingGroup) >>> from sympy.combinatorics.perm_groups import PermutationGroup >>> from sympy.combinatorics.permutations import", "2, 3, 4], 2:[1, 3, 5], 3:[1, 2, 4, 5], 4:[1, 3, 5],", "21] >>> c1 == c2 True \"\"\" from sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can", "'getitem'): return _naive_list_centralizer(self, PermutationGroup(other), af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]), af) def", "permutation representing the canonical form of the tensor. Examples ======== >>> from sympy.combinatorics.testutil", "= get_symmetric_group_sgs(i) v.append((base, gens, n, 0)) v.reverse() dummies = list(range(num_indices)) can = canonicalize(g,", "4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0, 4]} >>> gr2 =", "= AlternatingGroup(5) >>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])]) >>> _verify_centralizer(S, A,", "PermutationGroup(sgens) D = PermutationGroup([Permutation(x) for x in dgens]) dlist = list(D.generate(af=True)) g =", "4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]} >>> c1 = graph_certificate(gr1)", "indices of the tensor are twice the number of lines of the graph", "elif hasattr(other, 'getitem'): return _naive_list_centralizer(self, PermutationGroup(other), af) elif hasattr(other, 'array_form'): return _naive_list_centralizer(self, PermutationGroup([other]),", "``.centralizer()`` from ``sympy.combinatorics.perm_groups``. 
Examples ======== >>> from sympy.combinatorics.testutil import _naive_list_centralizer >>> from sympy.combinatorics.named_groups", "current_stabilizer = current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1: return False return True def _verify_centralizer(group,", "AlternatingGroup) >>> from sympy.combinatorics.perm_groups import PermutationGroup >>> from sympy.combinatorics.permutations import Permutation >>> from", ">>> base2, gens2 = get_symmetric_group_sgs(2) >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2,", "neigh in items: for v2 in neigh: if pvert[v] < pvert[v2]: vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1)", "import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>> D = DihedralGroup(4) >>> _naive_list_centralizer(D,", ">>> A.schreier_sims() >>> _verify_bsgs(A, A.base, A.strong_gens) True See Also ======== sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims \"\"\" from", "[arg] for el in group.generate_dimino(): for gen in subgr_gens: conjugates.add(gen ^ el) naive_closure", "\"\"\" Verify the centralizer of a group/set/element inside another group. This is used", "2, 3, 4]} >>> c1 = graph_certificate(gr1) >>> c2 = graph_certificate(gr2) >>> c1", "1])] See Also ======== sympy.combinatorics.perm_groups.centralizer \"\"\" from sympy.combinatorics.permutations import _af_commutes_with if hasattr(other, 'generators'):", "6, 1, 8, 10, 12, 3, 14, 16, 18, 5, 9, 15, 7,", "c1 == c2 True \"\"\" from sympy.combinatorics.permutations import _af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs,", "for verifying a base and strong generating set, but this one will serve", "!= prev[-1]: return 0 prev = h return list(a[0]) def graph_certificate(gr): \"\"\" Return", "if current_stabilizer.order() != 1: return False return True def _verify_centralizer(group, arg, centr=None): \"\"\"", "to it. There are other procedures for verifying a base and strong generating", "serve for more robust testing. Examples ======== >>> from sympy.combinatorics.named_groups import AlternatingGroup >>>", "import _verify_normal_closure >>> S = SymmetricGroup(3) >>> A = AlternatingGroup(3) >>> _verify_normal_closure(S, A,", "other, af=False): from sympy.combinatorics.perm_groups import PermutationGroup \"\"\" Return a list of elements for", "for el in group.generate_dimino(): for gen in subgr_gens: conjugates.add(gen ^ el) naive_closure =", "2, 0)) [0, 2, 1, 3, 4, 5] \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup", "10, 12, 3, 14, 16, 18, 5, 9, 15, 7, 11, 17, 13,", "= Permutation(g) vlen = [0]*(len(vertices[0])+1) for neigh in vertices: vlen[len(neigh)] += 1 v", "[Permutation([0, 1, 2, 3]), Permutation([2, 3, 0, 1])] See Also ======== sympy.combinatorics.perm_groups.centralizer \"\"\"", "\"\"\" Canonicalize tensor formed by tensors of the different types g permutation representing", "gens) centralizer_list = [] if not af: for element in elements: if commutes_with_gens(element):", "robust testing. Examples ======== >>> from sympy.combinatorics.named_groups import AlternatingGroup >>> from sympy.combinatorics.testutil import", "in the centralizer. It is used to test ``.centralizer()`` from ``sympy.combinatorics.perm_groups``. Examples ========", "\"\"\" from sympy.combinatorics.perm_groups import PermutationGroup strong_gens_distr = _distribute_gens_by_base(base, gens) current_stabilizer = group for", "from ``sympy.combinatorics.perm_groups``. 
Examples ======== >>> from sympy.combinatorics.testutil import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import", "b, c] >>> ls2 = [b, c, a] >>> _cmp_perm_lists(ls1, ls2) True \"\"\"", "is assumed to be unoriented and without external lines. Associate to each vertex", "second): \"\"\" Compare two lists of permutations as sets. This is used for", "dgens]) dlist = list(D.generate(af=True)) g = g.array_form st = set() for s in", "+= 2 g = [] for v in vertices: g.extend(v) assert len(g) ==", "subgr_gens = arg.generators elif hasattr(arg, '__getitem__'): subgr_gens = arg elif hasattr(arg, 'array_form'): subgr_gens", "sym_i)) size, sbase, sgens = gens_products(*v1) dgens = dummy_sgs(dummies, sym, size-2) if isinstance(sym,", "import PermutationGroup >>> from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>>", "= [] for i in range(num_types): dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2)) S =", "permutation representing the tensor dummies list of dummy indices msym symmetry of the", "representing the tensor dummies list of dummy indices msym symmetry of the metric", "of type `i` None no symmetry 0 commuting 1 anticommuting Return 0 if", "4]} >>> gr2 = {0:[1, 5], 1:[0, 2, 3, 4], 2:[1, 3, 5],", "= group.centralizer(arg) centr_list = list(centr.generate_dimino(af=True)) centr_list_naive = _naive_list_centralizer(group, arg, af=True) return _cmp_perm_lists(centr_list, centr_list_naive)", "gens2, 2, 0)) [0, 2, 1, 3, 4, 5] \"\"\" from sympy.combinatorics.perm_groups import", "items.sort(key=lambda x: len(x[1]), reverse=True) pvert = [x[0] for x in items] pvert =", "for v, neigh in items: num_indices += len(neigh) # associate to each vertex", "vertices[pvert[v]].append(i) vertices[pvert[v2]].append(i+1) i += 2 g = [] for v in vertices: g.extend(v)", "= [x[0] for x in items] pvert = _af_invert(pvert) # the indices of", "Permutation([2, 3, 0, 1])] See Also ======== sympy.combinatorics.perm_groups.centralizer \"\"\" from sympy.combinatorics.permutations import _af_commutes_with", "list(centr.generate_dimino(af=True)) centr_list_naive = _naive_list_centralizer(group, arg, af=True) return _cmp_perm_lists(centr_list, centr_list_naive) def _verify_normal_closure(group, arg, closure=None):", "2:[1, 3, 5], 3:[1, 2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3,", "= group.normal_closure(arg) conjugates = set() if hasattr(arg, 'generators'): subgr_gens = arg.generators elif hasattr(arg,", "sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S = SymmetricGroup(5) >>>", "sets. This is used for testing purposes. Since the array form of a", "for x in other.generators] commutes_with_gens = lambda x: all(_af_commutes_with(x, gen) for gen in", "c1 [0, 2, 4, 6, 1, 8, 10, 12, 3, 14, 16, 18,", "current_stabilizer.stabilizer(base[i]) if current_stabilizer.order() != 1: return False return True def _verify_centralizer(group, arg, centr=None):", "prev[-1]: return 0 prev = h return list(a[0]) def graph_certificate(gr): \"\"\" Return a", "in first} == \\ {tuple(a) for a in second} def _naive_list_centralizer(self, other, af=False):", "= num_indices + 2 assert sorted(g) == list(range(size)) g = Permutation(g) vlen =", "centralizer of a subgroup/set/element. 
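
# A minimal cross-check sketch, not part of the original module: the naive
# enumeration should agree, as a set, with PermutationGroup.centralizer.
# _demo_naive_centralizer is a hypothetical helper added for illustration.
def _demo_naive_centralizer():
    from sympy.combinatorics.named_groups import DihedralGroup
    D = DihedralGroup(4)
    naive = _naive_list_centralizer(D, D)
    fast = list(D.centralizer(D).generate_dimino())
    assert _cmp_perm_lists(naive, fast)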

def _verify_bsgs(group, base, gens):
    """
    Verify the correctness of a base and strong generating set.

    This is a naive implementation using the definition of a base and a
    strong generating set relative to it. There are other procedures for
    verifying a base and strong generating set, but this one will serve
    for more robust testing.

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import AlternatingGroup
    >>> from sympy.combinatorics.testutil import _verify_bsgs
    >>> A = AlternatingGroup(4)
    >>> A.schreier_sims()
    >>> _verify_bsgs(A, A.base, A.strong_gens)
    True

    See Also
    ========

    sympy.combinatorics.perm_groups.PermutationGroup.schreier_sims

    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    strong_gens_distr = _distribute_gens_by_base(base, gens)
    current_stabilizer = group
    for i in range(len(base)):
        candidate = PermutationGroup(strong_gens_distr[i])
        if current_stabilizer.order() != candidate.order():
            return False
        current_stabilizer = current_stabilizer.stabilizer(base[i])
    if current_stabilizer.order() != 1:
        return False
    return True
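
# A minimal usage sketch, not part of the original module: after running
# schreier_sims(), the computed base and strong generating set should pass
# _verify_bsgs.  _demo_verify_bsgs is a hypothetical helper for illustration.
def _demo_verify_bsgs():
    from sympy.combinatorics.named_groups import SymmetricGroup
    S = SymmetricGroup(4)
    S.schreier_sims()
    assert _verify_bsgs(S, S.base, S.strong_gens)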

def _verify_centralizer(group, arg, centr=None):
    """
    Verify the centralizer of a group/set/element inside another group.

    This is used for testing ``.centralizer()`` from
    ``sympy.combinatorics.perm_groups``.

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup)
    >>> from sympy.combinatorics.perm_groups import PermutationGroup
    >>> from sympy.combinatorics.permutations import Permutation
    >>> from sympy.combinatorics.testutil import _verify_centralizer
    >>> S = SymmetricGroup(5)
    >>> A = AlternatingGroup(5)
    >>> centr = PermutationGroup([Permutation([0, 1, 2, 3, 4])])
    >>> _verify_centralizer(S, A, centr)
    True

    See Also
    ========

    _naive_list_centralizer,
    sympy.combinatorics.perm_groups.PermutationGroup.centralizer,
    _cmp_perm_lists

    """
    if centr is None:
        centr = group.centralizer(arg)
    centr_list = list(centr.generate_dimino(af=True))
    centr_list_naive = _naive_list_centralizer(group, arg, af=True)
    return _cmp_perm_lists(centr_list, centr_list_naive)
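
# A minimal usage sketch, not part of the original module: with centr left
# as None, _verify_centralizer computes group.centralizer(arg) itself and
# checks it against the naive enumeration.  _demo_verify_centralizer is a
# hypothetical helper added for illustration.
def _demo_verify_centralizer():
    from sympy.combinatorics.named_groups import SymmetricGroup, AlternatingGroup
    assert _verify_centralizer(SymmetricGroup(4), AlternatingGroup(4))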

def _verify_normal_closure(group, arg, closure=None):
    """
    Verify the normal closure of a subgroup/subset/element in a group.

    This is used to test
    sympy.combinatorics.perm_groups.PermutationGroup.normal_closure

    Examples
    ========

    >>> from sympy.combinatorics.named_groups import (SymmetricGroup,
    ... AlternatingGroup)
    >>> from sympy.combinatorics.testutil import _verify_normal_closure
    >>> S = SymmetricGroup(3)
    >>> A = AlternatingGroup(3)
    >>> _verify_normal_closure(S, A, closure=A)
    True

    See Also
    ========

    sympy.combinatorics.perm_groups.PermutationGroup.normal_closure

    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    if closure is None:
        closure = group.normal_closure(arg)
    conjugates = set()
    if hasattr(arg, 'generators'):
        subgr_gens = arg.generators
    elif hasattr(arg, '__getitem__'):
        subgr_gens = arg
    elif hasattr(arg, 'array_form'):
        subgr_gens = [arg]
    for el in group.generate_dimino():
        for gen in subgr_gens:
            conjugates.add(gen ^ el)
    naive_closure = PermutationGroup(list(conjugates))
    return closure.is_subgroup(naive_closure)
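
# A minimal usage sketch, not part of the original module: the conjugates of
# a single transposition generate the whole symmetric group, so the naive
# closure matches PermutationGroup.normal_closure.  _demo_verify_normal_closure
# is a hypothetical helper added for illustration.
def _demo_verify_normal_closure():
    from sympy.combinatorics import Permutation
    from sympy.combinatorics.perm_groups import PermutationGroup
    from sympy.combinatorics.named_groups import SymmetricGroup
    S = SymmetricGroup(4)
    transposition = PermutationGroup([Permutation([1, 0, 2, 3])])
    assert _verify_normal_closure(S, transposition)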

def canonicalize_naive(g, dummies, sym, *v):
    """
    Canonicalize tensor formed by tensors of the different types.

    g  permutation representing the tensor
    dummies  list of dummy indices
    msym  symmetry of the metric

    v is a list of (base_i, gens_i, n_i, sym_i) for tensors of type `i`
    base_i, gens_i  BSGS for tensors of this type
    n_i  number of tensors of type `i`
    sym_i  symmetry under exchange of two component tensors of type `i`
        None  no symmetry
        0     commuting
        1     anticommuting

    Return 0 if the tensor is zero, else return the array form of
    the permutation representing the canonical form of the tensor.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import canonicalize_naive
    >>> from sympy.combinatorics.tensor_can import get_symmetric_group_sgs
    >>> from sympy.combinatorics import Permutation, PermutationGroup
    >>> g = Permutation([1, 3, 2, 0, 4, 5])
    >>> base2, gens2 = get_symmetric_group_sgs(2)
    >>> canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0))
    [0, 2, 1, 3, 4, 5]
    """
    from sympy.combinatorics.perm_groups import PermutationGroup
    from sympy.combinatorics.tensor_can import gens_products, dummy_sgs
    from sympy.combinatorics.permutations import Permutation, _af_rmul
    v1 = []
    for i in range(len(v)):
        base_i, gens_i, n_i, sym_i = v[i]
        v1.append((base_i, gens_i, [[]]*n_i, sym_i))
    size, sbase, sgens = gens_products(*v1)
    if isinstance(sym, int):
        num_types = 1
        dummies = [dummies]
        sym = [sym]
    else:
        num_types = len(sym)
    dgens = []
    for i in range(num_types):
        dgens.extend(dummy_sgs(dummies[i], sym[i], size - 2))
    S = PermutationGroup(sgens)
    D = PermutationGroup([Permutation(x) for x in dgens])
    dlist = list(D.generate(af=True))
    g = g.array_form
    st = set()
    for s in S.generate(af=True):
        h = _af_rmul(g, s)
        for d in dlist:
            q = tuple(_af_rmul(d, h))
            st.add(q)
    a = list(st)
    a.sort()
    prev = (0,)*size
    for h in a:
        if h[:-2] == prev[:-2]:
            if h[-1] != prev[-1]:
                return 0
        prev = h
    return list(a[0])
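
# A minimal cross-check sketch, not part of the original module: on the
# docstring example the naive double-coset enumeration should agree with the
# fast algorithm in sympy.combinatorics.tensor_can.canonicalize.
# _demo_canonicalize_naive is a hypothetical helper added for illustration.
def _demo_canonicalize_naive():
    from sympy.combinatorics import Permutation
    from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize
    g = Permutation([1, 3, 2, 0, 4, 5])
    base2, gens2 = get_symmetric_group_sgs(2)
    naive = canonicalize_naive(g, [2, 3], 0, (base2, gens2, 2, 0))
    fast = canonicalize(g, [2, 3], 0, (base2, gens2, 2, 0))
    assert naive == fast == [0, 2, 1, 3, 4, 5]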

def graph_certificate(gr):
    """
    Return a certificate for the graph.

    gr  adjacency list

    The graph is assumed to be unoriented and without external lines.

    Associate to each vertex of the graph a symmetric tensor with
    number of indices equal to the degree of the vertex; indices
    are contracted when they correspond to the same line of the graph.
    The canonical form of the tensor gives a certificate for the graph.

    This is not an efficient algorithm to get the certificate of a graph.

    Examples
    ========

    >>> from sympy.combinatorics.testutil import graph_certificate
    >>> gr1 = {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3, 4], 3:[0, 2, 4], 4:[1, 2, 3, 5], 5:[0, 4]}
    >>> gr2 = {0:[1, 5], 1:[0, 2, 3, 4], 2:[1, 3, 5], 3:[1, 2, 4, 5], 4:[1, 3, 5], 5:[0, 2, 3, 4]}
    >>> c1 = graph_certificate(gr1)
    >>> c2 = graph_certificate(gr2)
    >>> c1
    [0, 2, 4, 6, 1, 8, 10, 12, 3, 14, 16, 18, 5, 9, 15, 7, 11, 17, 13, 19, 20, 21]
    >>> c1 == c2
    True
    """
    from sympy.combinatorics.permutations import _af_invert
    from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize
    items = list(gr.items())
    items.sort(key=lambda x: len(x[1]), reverse=True)
    pvert = [x[0] for x in items]
    pvert = _af_invert(pvert)
    # the indices of the tensor are twice the number of lines of the graph
    num_indices = 0
    for v, neigh in items:
        num_indices += len(neigh)
    # associate to each vertex its indices; for each line
    # between two vertices assign the
    # even index to the vertex which comes first in items,
    # the odd index to the other vertex
    vertices = [[] for i in items]
    i = 0
    for v, neigh in items:
        for v2 in neigh:
            if pvert[v] < pvert[v2]:
                vertices[pvert[v]].append(i)
                vertices[pvert[v2]].append(i + 1)
                i += 2
    g = []
    for v in vertices:
        g.extend(v)
    assert len(g) == num_indices
    g += [num_indices, num_indices + 1]
    size = num_indices + 2
    assert sorted(g) == list(range(size))
    g = Permutation(g)
    vlen = [0]*(len(vertices[0]) + 1)
    for neigh in vertices:
        vlen[len(neigh)] += 1
    v = []
    for i in range(len(vlen)):
        n = vlen[i]
        if n:
            base, gens = get_symmetric_group_sgs(i)
            v.append((base, gens, n, 0))
    v.reverse()
    dummies = list(range(num_indices))
    can = canonicalize(g, dummies, 0, *v)
    return can
The canonical form", "from sympy.combinatorics.testutil import _verify_normal_closure >>> S = SymmetricGroup(3) >>> A = AlternatingGroup(3) >>>", "= {0:[1, 2, 3, 5], 1:[0, 2, 4], 2:[0, 1, 3, 4], 3:[0,", "`i` sym_i symmetry under exchange of two component tensors of type `i` None", "ls2) True \"\"\" return {tuple(a) for a in first} == \\ {tuple(a) for", "Examples ======== >>> from sympy.combinatorics.testutil import _naive_list_centralizer >>> from sympy.combinatorics.named_groups import DihedralGroup >>>", "from sympy.combinatorics.named_groups import DihedralGroup >>> D = DihedralGroup(4) >>> _naive_list_centralizer(D, D) [Permutation([0, 1,", "[] for i in range(len(v)): base_i, gens_i, n_i, sym_i = v[i] v1.append((base_i, gens_i,", "is not an efficient algorithm to get the certificate of a graph. Examples", "_af_invert from sympy.combinatorics.tensor_can import get_symmetric_group_sgs, canonicalize items = list(gr.items()) items.sort(key=lambda x: len(x[1]), reverse=True)", ">>> from sympy.combinatorics.testutil import _cmp_perm_lists >>> a = Permutation([0, 2, 3, 4, 1])", "is a naive implementation using the definition of a base and a strong", "Permutation([3, 4, 0, 1, 2]) >>> ls1 = [a, b, c] >>> ls2", "from sympy.combinatorics.permutations import Permutation >>> from sympy.combinatorics.testutil import _cmp_perm_lists >>> a = Permutation([0,", "form of a permutation is currently a list, Permutation is not hashable and", "Permutation >>> from sympy.combinatorics.testutil import _verify_centralizer >>> S = SymmetricGroup(5) >>> A =", "1, 3, 4, 5] \"\"\" from sympy.combinatorics.perm_groups import PermutationGroup from sympy.combinatorics.tensor_can import gens_products,", "in subgr_gens: conjugates.add(gen ^ el) naive_closure = PermutationGroup(list(conjugates)) return closure.is_subgroup(naive_closure) def canonicalize_naive(g, dummies,", "certificate of a graph. Examples ======== >>> from sympy.combinatorics.testutil import graph_certificate >>> gr1", "S = SymmetricGroup(5) >>> A = AlternatingGroup(5) >>> centr = PermutationGroup([Permutation([0, 1, 2,", "import _verify_bsgs >>> A = AlternatingGroup(4) >>> A.schreier_sims() >>> _verify_bsgs(A, A.base, A.strong_gens) True", "_verify_centralizer(S, A, centr) True See Also ======== _naive_list_centralizer, sympy.combinatorics.perm_groups.PermutationGroup.centralizer, _cmp_perm_lists \"\"\" if centr", "tensors of type `i` base_i, gens_i BSGS for tensors of this type n_i" ]
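The verification helpers above cross-check the fast group-theoretic algorithms against brute-force computations. Below is a minimal sketch of how they might be combined in a test; the group sizes and the _sanity_checks wrapper are illustrative assumptions, while the individual calls mirror the doctests above.

from sympy.combinatorics.named_groups import SymmetricGroup, AlternatingGroup
from sympy.combinatorics.testutil import (_verify_bsgs, _verify_centralizer,
                                          _verify_normal_closure)

def _sanity_checks():
    S = SymmetricGroup(4)
    A = AlternatingGroup(4)

    # Schreier-Sims should produce a valid base and strong generating set.
    A.schreier_sims()
    assert _verify_bsgs(A, A.base, A.strong_gens)

    # The fast centralizer and normal closure computations should agree
    # with the brute-force versions implemented above.
    assert _verify_centralizer(S, A)
    assert _verify_normal_closure(S, A)

_sanity_checks()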
# vaticChecker server configuration
localhost = "http://localhost/"  # your local host
database = "mysql://root@localhost/vaticChecker"  # server://user:pass@localhost/dbname
min_training = 2  # the minimum number of training videos to be considered
recaptcha_secret = ""  # recaptcha secret for verification
duplicate_annotations = False  # Should the server allow for duplicate annotations?

import os.path
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# TODO: remove on server
import os
os.environ['PYTHON_EGG_CACHE'] = '/tmp/apache'
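The settings above are plain module-level constants, so other parts of the server can import and sanity-check them directly. The sketch below shows that pattern under stated assumptions: the module name config and the database_is_local helper are hypothetical and not part of the original project.

import config  # assumed name of the settings module above

def database_is_local():
    # The URL follows the server://user:pass@host/dbname convention noted above.
    return "@localhost/" in config.database

assert config.min_training >= 1, "at least one training video is required"
if not config.recaptcha_secret:
    print("warning: recaptcha verification is effectively disabled")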
# django/utils/timezone.py
"""Timezone helper functions.

This module uses pytz when it's available and falls back when it isn't.
"""

from datetime import datetime, timedelta, tzinfo
from threading import local
import time as _time

try:
    import pytz
except ImportError:
    pytz = None

from django.conf import settings

__all__ = [
    'utc', 'get_default_timezone', 'get_current_timezone',
    'activate', 'deactivate', 'override',
    'is_naive', 'is_aware', 'make_aware', 'make_naive',
]


# UTC and local time zones

ZERO = timedelta(0)

class UTC(tzinfo):
    """
    UTC implementation taken from Python's docs.

    Used only when pytz isn't available.
    """

    def __repr__(self):
        return "<UTC>"

    def utcoffset(self, dt):
        return ZERO

    def tzname(self, dt):
        return "UTC"

    def dst(self, dt):
        return ZERO

class LocalTimezone(tzinfo):
    """
    Local time implementation taken from Python's docs.

    Used only when pytz isn't available, and most likely inaccurate. If you're
    having trouble with this class, don't waste your time, just install pytz.
    """

    def __init__(self):
        # This code is moved in __init__ to execute it as late as possible
        # See get_default_timezone().
        self.STDOFFSET = timedelta(seconds=-_time.timezone)
        if _time.daylight:
            self.DSTOFFSET = timedelta(seconds=-_time.altzone)
        else:
            self.DSTOFFSET = self.STDOFFSET
        self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET
        tzinfo.__init__(self)

    def __repr__(self):
        return "<LocalTimezone>"

    def utcoffset(self, dt):
        if self._isdst(dt):
            return self.DSTOFFSET
        else:
            return self.STDOFFSET

    def dst(self, dt):
        if self._isdst(dt):
            return self.DSTDIFF
        else:
            return ZERO

    def tzname(self, dt):
        return _time.tzname[self._isdst(dt)]

    def _isdst(self, dt):
        tt = (dt.year, dt.month, dt.day,
              dt.hour, dt.minute, dt.second,
              dt.weekday(), 0, 0)
        stamp = _time.mktime(tt)
        tt = _time.localtime(stamp)
        return tt.tm_isdst > 0


utc = pytz.utc if pytz else UTC()
"""UTC time zone as a tzinfo instance."""

# In order to avoid accessing the settings at compile time,
# wrap the expression in a function and cache the result.
_localtime = None

def get_default_timezone():
    """
    Returns the default time zone as a tzinfo instance.

    This is the time zone defined by settings.TIME_ZONE.

    See also :func:`get_current_timezone`.
    """
    global _localtime
    if _localtime is None:
        if isinstance(settings.TIME_ZONE, basestring) and pytz is not None:
            _localtime = pytz.timezone(settings.TIME_ZONE)
        else:
            _localtime = LocalTimezone()
    return _localtime

# This function exists for consistency with get_current_timezone_name
def get_default_timezone_name():
    """
    Returns the name of the default time zone.
    """
    return _get_timezone_name(get_default_timezone())

_active = local()

def get_current_timezone():
    """
    Returns the currently active time zone as a tzinfo instance.
    """
    return getattr(_active, "value", get_default_timezone())

def get_current_timezone_name():
    """
    Returns the name of the currently active time zone.
    """
    return _get_timezone_name(get_current_timezone())

def _get_timezone_name(timezone):
    """
    Returns the name of ``timezone``.
    """
    try:
        # for pytz timezones
        return timezone.zone
    except AttributeError:
        # for regular tzinfo objects
        local_now = datetime.now(timezone)
        return timezone.tzname(local_now)

# Timezone selection functions.

# These functions don't change os.environ['TZ'] and call time.tzset()
# because it isn't thread safe.

def activate(timezone):
    """
    Sets the time zone for the current thread.

    The ``timezone`` argument must be an instance of a tzinfo subclass or a
    time zone name. If it is a time zone name, pytz is required.
    """
    if isinstance(timezone, tzinfo):
        _active.value = timezone
    elif isinstance(timezone, basestring) and pytz is not None:
        _active.value = pytz.timezone(timezone)
    else:
        raise ValueError("Invalid timezone: %r" % timezone)

def deactivate():
    """
    Unsets the time zone for the current thread.

    Django will then use the time zone defined by settings.TIME_ZONE.
    """
    if hasattr(_active, "value"):
        del _active.value

class override(object):
    """
    Temporarily set the time zone for the current thread.

    This is a context manager that uses ``~django.utils.timezone.activate()``
    to set the timezone on entry, and restores the previously active timezone
    on exit.

    The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a
    time zone name, or ``None``. If it is a time zone name, pytz is required.
    If it is ``None``, Django enables the default time zone.
    """
    def __init__(self, timezone):
        self.timezone = timezone
        self.old_timezone = getattr(_active, 'value', None)

    def __enter__(self):
        if self.timezone is None:
            deactivate()
        else:
            activate(self.timezone)

    def __exit__(self, exc_type, exc_value, traceback):
        if self.old_timezone is not None:
            _active.value = self.old_timezone
        else:
            del _active.value


# Templates

def template_localtime(value, use_tz=None):
    """
    Checks if value is a datetime and converts it to local time if necessary.

    If use_tz is provided and is not None, that will force the value to
    be converted (or not), overriding the value of settings.USE_TZ.

    This function is designed for use by the template engine.
    """
    should_convert = (isinstance(value, datetime)
        and (settings.USE_TZ if use_tz is None else use_tz)
        and not is_naive(value)
        and getattr(value, 'convert_to_local_time', True))
    return localtime(value) if should_convert else value


# Utilities

def localtime(value, timezone=None):
    """
    Converts an aware datetime.datetime to local time.

    Local time is defined by the current time zone, unless another time zone
    is specified.
    """
    if timezone is None:
        timezone = get_current_timezone()
    value = value.astimezone(timezone)
    if hasattr(timezone, 'normalize'):
        # available for pytz time zones
        value = timezone.normalize(value)
    return value

def now():
    """
    Returns an aware or naive datetime.datetime, depending on settings.USE_TZ.
    """
    if settings.USE_TZ:
        # timeit shows that datetime.now(tz=utc) is 24% slower
        return datetime.utcnow().replace(tzinfo=utc)
    else:
        return datetime.now()

# By design, these four functions don't perform any checks on their arguments.
# The caller should ensure that they don't receive an invalid value like None.

def is_aware(value):
    """
    Determines if a given datetime.datetime is aware.

    The logic is described in Python's docs:
    http://docs.python.org/library/datetime.html#datetime.tzinfo
    """
    return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None

def is_naive(value):
    """
    Determines if a given datetime.datetime is naive.

    The logic is described in Python's docs:
    http://docs.python.org/library/datetime.html#datetime.tzinfo
    """
    return value.tzinfo is None or value.tzinfo.utcoffset(value) is None

def make_aware(value, timezone):
    """
    Makes a naive datetime.datetime in a given time zone aware.
    """
    if hasattr(timezone, 'localize'):
        # available for pytz time zones
        return timezone.localize(value, is_dst=None)
    else:
        # may be wrong around DST changes
        return value.replace(tzinfo=timezone)

def make_naive(value, timezone):
    """
    Makes an aware datetime.datetime naive in a given time zone.
    """
    value = value.astimezone(timezone)
    if hasattr(timezone, 'normalize'):
        # available for pytz time zones
        value = timezone.normalize(value)
    return value
Local time", "for pytz time zones value = timezone.normalize(value) return value def now(): \"\"\" Returns", "timezone) def deactivate(): \"\"\" Unsets the time zone for the current thread. Django", "for consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name of the default", "it isn't thread safe. def activate(timezone): \"\"\" Sets the time zone for the", "__init__ to execute it as late as possible # See get_default_timezone(). self.STDOFFSET =", "tt = _time.localtime(stamp) return tt.tm_isdst > 0 utc = pytz.utc if pytz else", "with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name of the default time zone.", "and (settings.USE_TZ if use_tz is None else use_tz) and not is_naive(value) and getattr(value,", "> 0 utc = pytz.utc if pytz else UTC() \"\"\"UTC time zone as", "naive in a given time zone. \"\"\" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'):", "try: import pytz except ImportError: pytz = None from django.conf import settings __all__", "None: _active.value = pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\" % timezone) def deactivate():", "ValueError(\"Invalid timezone: %r\" % timezone) def deactivate(): \"\"\" Unsets the time zone for", "= pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\" % timezone) def deactivate(): \"\"\" Unsets", "= get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz time", "# wrap the expression in a function and cache the result. _localtime =", "docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None", "http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None def", "if self._isdst(dt): return self.DSTDIFF else: return ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)] def", "_localtime = LocalTimezone() return _localtime # This function exists for consistency with get_current_timezone_name", "import local import time as _time try: import pytz except ImportError: pytz =", "See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET =", "self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt):", "compile time, # wrap the expression in a function and cache the result.", "\"\"\" def __init__(self): # This code is moved in __init__ to execute it", "must be an instance of a ``tzinfo`` subclass, a time zone name, or", "localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime to local time. Local time is", "# The caller should ensure that they don't receive an invalid value like", "that uses ``~django.utils.timezone.activate()`` to set the timezone on entry, and restores the previously", "# may be wrong around DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\"", "Returns the name of the currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def", "if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def dst(self, dt): if self._isdst(dt): return", "Python's docs. Used only when pytz isn't available. 
\"\"\" def __repr__(self): return \"<UTC>\"", "DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes an aware datetime.datetime naive", "class override(object): \"\"\" Temporarily set the time zone for the current thread. This", "If you're having trouble with this class, don't waste your time, just install", "localtime(value) if should_convert else value # Utilities def localtime(value, timezone=None): \"\"\" Converts an", "\"\"\" Temporarily set the time zone for the current thread. This is a", "global _localtime if _localtime is None: if isinstance(settings.TIME_ZONE, basestring) and pytz is not", "as a tzinfo instance. \"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns", "self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\"", "exists for consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name of the", "having trouble with this class, don't waste your time, just install pytz. \"\"\"", "# available for pytz time zones return timezone.localize(value, is_dst=None) else: # may be", "the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone(): \"\"\"", "def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def dst(self, dt):", "ensure that they don't receive an invalid value like None. def is_aware(value): \"\"\"", "\"\"\" if hasattr(_active, \"value\"): del _active.value class override(object): \"\"\" Temporarily set the time", "if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET -", "value.tzinfo is None or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): \"\"\" Makes a", "required. \"\"\" if isinstance(timezone, tzinfo): _active.value = timezone elif isinstance(timezone, basestring) and pytz", "else: activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback): if self.old_timezone is not None: _active.value", "waste your time, just install pytz. \"\"\" def __init__(self): # This code is", "a tzinfo instance. \"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the", "value # Utilities def localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime to local", "timezone): \"\"\" Makes an aware datetime.datetime naive in a given time zone. \"\"\"", "pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\" % timezone) def deactivate(): \"\"\" Unsets the", "exc_value, traceback): if self.old_timezone is not None: _active.value = self.old_timezone else: del _active.value", "def get_default_timezone(): \"\"\" Returns the default time zone as a tzinfo instance. This", "value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes an aware datetime.datetime naive in a given", "only when pytz isn't available, and most likely inaccurate. If you're having trouble", "getattr(value, 'convert_to_local_time', True)) return localtime(value) if should_convert else value # Utilities def localtime(value,", "and call time.tzset() # because it isn't thread safe. def activate(timezone): \"\"\" Sets", "given datetime.datetime is naive. 
The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\"", "the current thread. This is a context manager that uses ``~django.utils.timezone.activate()`` to set", "'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ] # UTC and local time zones ZERO", "the name of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local()", "and not is_naive(value) and getattr(value, 'convert_to_local_time', True)) return localtime(value) if should_convert else value", "get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz time zones", "is aware. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo", "only when pytz isn't available. \"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self, dt):", "zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone(): \"\"\" Returns the currently", "# because it isn't thread safe. def activate(timezone): \"\"\" Sets the time zone", "\"\"\" should_convert = (isinstance(value, datetime) and (settings.USE_TZ if use_tz is None else use_tz)", "datetime and converts it to local time if necessary. If use_tz is provided", "in a given time zone aware. \"\"\" if hasattr(timezone, 'localize'): # available for", "self.STDOFFSET def dst(self, dt): if self._isdst(dt): return self.DSTDIFF else: return ZERO def tzname(self,", "logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not None", "(isinstance(value, datetime) and (settings.USE_TZ if use_tz is None else use_tz) and not is_naive(value)", "get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the name of the currently active time zone.", "_get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the name of ``timezone``. \"\"\" try: # for", "this class, don't waste your time, just install pytz. \"\"\" def __init__(self): #", "__repr__(self): return \"<UTC>\" def utcoffset(self, dt): return ZERO def tzname(self, dt): return \"UTC\"", "name of the currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\"", "then use the time zone defined by settings.TIME_ZONE. \"\"\" if hasattr(_active, \"value\"): del", "zone as a tzinfo instance. This is the time zone defined by settings.TIME_ZONE.", "\"\"\" Returns the name of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active", "def get_current_timezone(): \"\"\" Returns the currently active time zone as a tzinfo instance.", "the name of the currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone):", "zone defined by settings.TIME_ZONE. \"\"\" if hasattr(_active, \"value\"): del _active.value class override(object): \"\"\"", "value def now(): \"\"\" Returns an aware or naive datetime.datetime, depending on settings.USE_TZ.", "settings at compile time, # wrap the expression in a function and cache", "the time zone for the current thread. Django will then use the time", "return value.tzinfo is None or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): \"\"\" Makes", "\"\"\" Sets the time zone for the current thread. The ``timezone`` argument must", "timezone on exit. 
The ``timezone`` argument must be an instance of a ``tzinfo``", "given datetime.datetime is aware. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\"", "zone name, or ``None``. If is it a time zone name, pytz is", "the default time zone as a tzinfo instance. This is the time zone", "if isinstance(timezone, tzinfo): _active.value = timezone elif isinstance(timezone, basestring) and pytz is not", "_active.value # Templates def template_localtime(value, use_tz=None): \"\"\" Checks if value is a datetime", "hasattr(_active, \"value\"): del _active.value class override(object): \"\"\" Temporarily set the time zone for", "24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design, these four functions", "except ImportError: pytz = None from django.conf import settings __all__ = [ 'utc',", "= pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return _localtime # This function exists for", "'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ] # UTC", "don't waste your time, just install pytz. \"\"\" def __init__(self): # This code", "None: deactivate() else: activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback): if self.old_timezone is not", "Templates def template_localtime(value, use_tz=None): \"\"\" Checks if value is a datetime and converts", "if a given datetime.datetime is naive. The logic is described in Python's docs:", "when pytz isn't available. \"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self, dt): return", "ZERO class LocalTimezone(tzinfo): \"\"\" Local time implementation taken from Python's docs. Used only", "order to avoid accessing the settings at compile time, # wrap the expression", "pytz time zones value = timezone.normalize(value) return value def now(): \"\"\" Returns an", "implementation taken from Python's docs. Used only when pytz isn't available. \"\"\" def", "'make_naive', ] # UTC and local time zones ZERO = timedelta(0) class UTC(tzinfo):", "for use by the template engine. \"\"\" should_convert = (isinstance(value, datetime) and (settings.USE_TZ", "zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime if _localtime is", "activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback): if self.old_timezone is not None: _active.value =", "the currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the", "Returns the default time zone as a tzinfo instance. This is the time", "The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not", "zones ZERO = timedelta(0) class UTC(tzinfo): \"\"\" UTC implementation taken from Python's docs.", "self.timezone = timezone self.old_timezone = getattr(_active, 'value', None) def __enter__(self): if self.timezone is", "def dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local time implementation taken from", "\"\"\" if settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc)", "it is a time zone name, pytz is required. \"\"\" if isinstance(timezone, tzinfo):", "aware or naive datetime.datetime, depending on settings.USE_TZ. 
\"\"\" if settings.USE_TZ: # timeit shows", "None def is_naive(value): \"\"\" Determines if a given datetime.datetime is naive. The logic", "datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. # These functions don't change os.environ['TZ']", "logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is None or", "zones value = timezone.normalize(value) return value def now(): \"\"\" Returns an aware or", "return ZERO def tzname(self, dt): return \"UTC\" def dst(self, dt): return ZERO class", "if use_tz is None else use_tz) and not is_naive(value) and getattr(value, 'convert_to_local_time', True))", "These functions don't change os.environ['TZ'] and call time.tzset() # because it isn't thread", "get_default_timezone_name(): \"\"\" Returns the name of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone())", "UTC implementation taken from Python's docs. Used only when pytz isn't available. \"\"\"", "# In order to avoid accessing the settings at compile time, # wrap", "\"\"\"Timezone helper functions. This module uses pytz when it's available and fallbacks when", "use by the template engine. \"\"\" should_convert = (isinstance(value, datetime) and (settings.USE_TZ if", "likely inaccurate. If you're having trouble with this class, don't waste your time,", "it as late as possible # See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight:", "dt): tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0) stamp", "This module uses pytz when it's available and fallbacks when it isn't. \"\"\"", "depending on settings.USE_TZ. \"\"\" if settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is 24%", "isn't available, and most likely inaccurate. If you're having trouble with this class,", "\"\"\" Local time implementation taken from Python's docs. Used only when pytz isn't", "if isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime", "return timezone.zone except AttributeError: # for regular tzinfo objects local_now = datetime.now(timezone) return", "def make_aware(value, timezone): \"\"\" Makes a naive datetime.datetime in a given time zone", "= timezone elif isinstance(timezone, basestring) and pytz is not None: _active.value = pytz.timezone(timezone)", "must be an instance of a tzinfo subclass or a time zone name.", "``timezone`` argument must be an instance of a ``tzinfo`` subclass, a time zone", "_get_timezone_name(timezone): \"\"\" Returns the name of ``timezone``. \"\"\" try: # for pytz timezones", "converted (or not), overriding the value of settings.USE_TZ. This function is designed for", "given time zone. \"\"\" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for", "Makes an aware datetime.datetime naive in a given time zone. \"\"\" value =", "settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime if _localtime is None: if isinstance(settings.TIME_ZONE,", "a given time zone. \"\"\" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available", "= getattr(_active, 'value', None) def __enter__(self): if self.timezone is None: deactivate() else: activate(self.timezone)", "def utcoffset(self, dt): return ZERO def tzname(self, dt): return \"UTC\" def dst(self, dt):", "active timezone on exit. 
The ``timezone`` argument must be an instance of a", "just install pytz. \"\"\" def __init__(self): # This code is moved in __init__", "_time.tzname[self._isdst(dt)] def _isdst(self, dt): tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(),", "LocalTimezone() return _localtime # This function exists for consistency with get_current_timezone_name def get_default_timezone_name():", "utcoffset(self, dt): return ZERO def tzname(self, dt): return \"UTC\" def dst(self, dt): return", "datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design, these", "for the current thread. Django will then use the time zone defined by", "By design, these four functions don't perform any checks on their arguments. #", "is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is not None and", "to avoid accessing the settings at compile time, # wrap the expression in", "= timedelta(0) class UTC(tzinfo): \"\"\" UTC implementation taken from Python's docs. Used only", "a time zone name, or ``None``. If is it a time zone name,", "to be converted (or not), overriding the value of settings.USE_TZ. This function is", "django.conf import settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive',", "around DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes an aware datetime.datetime", "taken from Python's docs. Used only when pytz isn't available, and most likely", "None and value.tzinfo.utcoffset(value) is not None def is_naive(value): \"\"\" Determines if a given", "\"UTC\" def dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local time implementation taken", "defined by the current time zone, unless another time zone is specified. \"\"\"", "when it isn't. \"\"\" from datetime import datetime, timedelta, tzinfo from threading import", "dt): if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def dst(self, dt): if self._isdst(dt):", "None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return _localtime # This function", "dt): return \"UTC\" def dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local time", "dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local time implementation taken from Python's docs.", "if should_convert else value # Utilities def localtime(value, timezone=None): \"\"\" Converts an aware", "the settings at compile time, # wrap the expression in a function and", "time zone as a tzinfo instance.\"\"\" # In order to avoid accessing the", "hasattr(timezone, 'normalize'): # available for pytz time zones value = timezone.normalize(value) return value.replace(tzinfo=None)", "local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. # These functions don't", "timezone.tzname(local_now) # Timezone selection functions. # These functions don't change os.environ['TZ'] and call", "timeit shows that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() #", "default time zone as a tzinfo instance. 
This is the time zone defined", "if self.old_timezone is not None: _active.value = self.old_timezone else: del _active.value # Templates", "datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design, these four functions don't perform any", "zone for the current thread. This is a context manager that uses ``~django.utils.timezone.activate()``", "tzname(self, dt): return \"UTC\" def dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\" Local", "to set the timezone on entry, and restores the previously active timezone on", "instance. This is the time zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\"", "\"\"\" Determines if a given datetime.datetime is naive. The logic is described in", "restores the previously active timezone on exit. The ``timezone`` argument must be an", "get_default_timezone(): \"\"\" Returns the default time zone as a tzinfo instance. This is", "time zone for the current thread. Django will then use the time zone", "= (isinstance(value, datetime) and (settings.USE_TZ if use_tz is None else use_tz) and not", "if _localtime is None: if isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime", "time zone. \"\"\" def __init__(self, timezone): self.timezone = timezone self.old_timezone = getattr(_active, 'value',", "value like None. def is_aware(value): \"\"\" Determines if a given datetime.datetime is aware.", "shows that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By", "'make_aware', 'make_naive', ] # UTC and local time zones ZERO = timedelta(0) class", "that will force the value to be converted (or not), overriding the value", "and pytz is not None: _active.value = pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone: %r\"", "will then use the time zone defined by settings.TIME_ZONE. \"\"\" if hasattr(_active, \"value\"):", "The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a time", "timezone is None: timezone = get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): #", "= _time.localtime(stamp) return tt.tm_isdst > 0 utc = pytz.utc if pytz else UTC()", "for pytz time zones return timezone.localize(value, is_dst=None) else: # may be wrong around", "# By design, these four functions don't perform any checks on their arguments.", "execute it as late as possible # See get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if", "your time, just install pytz. \"\"\" def __init__(self): # This code is moved", "zone, unless another time zone is specified. \"\"\" if timezone is None: timezone", "be wrong around DST changes return value.replace(tzinfo=timezone) def make_naive(value, timezone): \"\"\" Makes an", "\"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the name of the currently active time", "to local time. Local time is defined by the current time zone, unless", "Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is None or value.tzinfo.utcoffset(value) is None def", "% timezone) def deactivate(): \"\"\" Unsets the time zone for the current thread.", "defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime if _localtime is None:", "\"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self, dt): return ZERO def tzname(self, dt):", "to execute it as late as possible # See get_default_timezone(). 
self.STDOFFSET = timedelta(seconds=-_time.timezone)", "timezone elif isinstance(timezone, basestring) and pytz is not None: _active.value = pytz.timezone(timezone) else:", "name of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local() def", "_get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone(): \"\"\" Returns the currently active time zone", "self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF", "time, # wrap the expression in a function and cache the result. _localtime", "should_convert = (isinstance(value, datetime) and (settings.USE_TZ if use_tz is None else use_tz) and", "dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0) stamp = _time.mktime(tt) tt = _time.localtime(stamp)", "__repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET", "self.old_timezone is not None: _active.value = self.old_timezone else: del _active.value # Templates def", "self._isdst(dt): return self.DSTDIFF else: return ZERO def tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self,", "an instance of a tzinfo subclass or a time zone name. If it", "template_localtime(value, use_tz=None): \"\"\" Checks if value is a datetime and converts it to", "if settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else:", "the name of ``timezone``. \"\"\" try: # for pytz timezones return timezone.zone except", "'value', None) def __enter__(self): if self.timezone is None: deactivate() else: activate(self.timezone) def __exit__(self,", "aware. The logic is described in Python's docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is", "timezone on entry, and restores the previously active timezone on exit. The ``timezone``", "pytz timezones return timezone.zone except AttributeError: # for regular tzinfo objects local_now =", "pytz. \"\"\" def __init__(self): # This code is moved in __init__ to execute", "dst(self, dt): if self._isdst(dt): return self.DSTDIFF else: return ZERO def tzname(self, dt): return", "active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the name of", "not None and value.tzinfo.utcoffset(value) is not None def is_naive(value): \"\"\" Determines if a", "Used only when pytz isn't available, and most likely inaccurate. If you're having", "a tzinfo instance.\"\"\" # In order to avoid accessing the settings at compile", "with this class, don't waste your time, just install pytz. \"\"\" def __init__(self):", "_active.value = timezone elif isinstance(timezone, basestring) and pytz is not None: _active.value =", "Used only when pytz isn't available. 
\"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self,", "for regular tzinfo objects local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions.", "None: if isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime = pytz.timezone(settings.TIME_ZONE) else:", "be an instance of a ``tzinfo`` subclass, a time zone name, or ``None``.", "# Templates def template_localtime(value, use_tz=None): \"\"\" Checks if value is a datetime and", "is defined by the current time zone, unless another time zone is specified.", "enables the default time zone. \"\"\" def __init__(self, timezone): self.timezone = timezone self.old_timezone", "for pytz timezones return timezone.zone except AttributeError: # for regular tzinfo objects local_now", "``~django.utils.timezone.activate()`` to set the timezone on entry, and restores the previously active timezone", "__init__(self, timezone): self.timezone = timezone self.old_timezone = getattr(_active, 'value', None) def __enter__(self): if", "docs: http://docs.python.org/library/datetime.html#datetime.tzinfo \"\"\" return value.tzinfo is None or value.tzinfo.utcoffset(value) is None def make_aware(value,", "0 utc = pytz.utc if pytz else UTC() \"\"\"UTC time zone as a", "def tzname(self, dt): return \"UTC\" def dst(self, dt): return ZERO class LocalTimezone(tzinfo): \"\"\"", "_localtime is None: if isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime =", "settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware',", "local time if necessary. If use_tz is provided and is not None, that", "the current time zone, unless another time zone is specified. \"\"\" if timezone", "Makes a naive datetime.datetime in a given time zone aware. \"\"\" if hasattr(timezone,", "zone name, pytz is required. If it is ``None``, Django enables the default", "the expression in a function and cache the result. _localtime = None def", "return \"<UTC>\" def utcoffset(self, dt): return ZERO def tzname(self, dt): return \"UTC\" def", "previously active timezone on exit. The ``timezone`` argument must be an instance of", "\"\"\" Returns the currently active time zone as a tzinfo instance. \"\"\" return", "basestring) and pytz is not None: _active.value = pytz.timezone(timezone) else: raise ValueError(\"Invalid timezone:", "it to local time if necessary. If use_tz is provided and is not", "slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design, these four functions don't", "if hasattr(timezone, 'normalize'): # available for pytz time zones value = timezone.normalize(value) return", "self.old_timezone = getattr(_active, 'value', None) def __enter__(self): if self.timezone is None: deactivate() else:", "def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else: return", "timezone): \"\"\" Makes a naive datetime.datetime in a given time zone aware. \"\"\"", "value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz time zones value", "necessary. If use_tz is provided and is not None, that will force the", "time zone as a tzinfo instance. 
\"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name():", "None or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): \"\"\" Makes a naive datetime.datetime", "``tzinfo`` subclass, a time zone name, or ``None``. If is it a time", "Utilities def localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime to local time. Local", "zone. \"\"\" value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for pytz time", "currently active time zone. \"\"\" return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the name", "tt.tm_isdst > 0 utc = pytz.utc if pytz else UTC() \"\"\"UTC time zone", "get_default_timezone(). self.STDOFFSET = timedelta(seconds=-_time.timezone) if _time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET", "the previously active timezone on exit. The ``timezone`` argument must be an instance", "should_convert else value # Utilities def localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime", "__enter__(self): if self.timezone is None: deactivate() else: activate(self.timezone) def __exit__(self, exc_type, exc_value, traceback):", "local time zones ZERO = timedelta(0) class UTC(tzinfo): \"\"\" UTC implementation taken from", "and cache the result. _localtime = None def get_default_timezone(): \"\"\" Returns the default", "is a datetime and converts it to local time if necessary. If use_tz", "settings.USE_TZ. This function is designed for use by the template engine. \"\"\" should_convert", "an aware or naive datetime.datetime, depending on settings.USE_TZ. \"\"\" if settings.USE_TZ: # timeit", "value is a datetime and converts it to local time if necessary. If", "settings.USE_TZ. \"\"\" if settings.USE_TZ: # timeit shows that datetime.now(tz=utc) is 24% slower return", "_active = local() def get_current_timezone(): \"\"\" Returns the currently active time zone as", "return localtime(value) if should_convert else value # Utilities def localtime(value, timezone=None): \"\"\" Converts", "tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else:", "isinstance(settings.TIME_ZONE, basestring) and pytz is not None: _localtime = pytz.timezone(settings.TIME_ZONE) else: _localtime =", "datetime.now() # By design, these four functions don't perform any checks on their", "def __repr__(self): return \"<UTC>\" def utcoffset(self, dt): return ZERO def tzname(self, dt): return", "Determines if a given datetime.datetime is naive. The logic is described in Python's", "``timezone`` argument must be an instance of a tzinfo subclass or a time", "= _time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst > 0 utc = pytz.utc if", "receive an invalid value like None. def is_aware(value): \"\"\" Determines if a given", "return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return self.DSTOFFSET else: return self.STDOFFSET def", "del _active.value # Templates def template_localtime(value, use_tz=None): \"\"\" Checks if value is a", "tzinfo objects local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. # These", "= datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. 
# These functions don't change", "tzinfo): _active.value = timezone elif isinstance(timezone, basestring) and pytz is not None: _active.value", "value of settings.USE_TZ. This function is designed for use by the template engine.", "stamp = _time.mktime(tt) tt = _time.localtime(stamp) return tt.tm_isdst > 0 utc = pytz.utc", "the current thread. Django will then use the time zone defined by settings.TIME_ZONE.", "self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt):", "'normalize'): # available for pytz time zones value = timezone.normalize(value) return value def", "designed for use by the template engine. \"\"\" should_convert = (isinstance(value, datetime) and", "pytz except ImportError: pytz = None from django.conf import settings __all__ = [", "getattr(_active, 'value', None) def __enter__(self): if self.timezone is None: deactivate() else: activate(self.timezone) def", "\"\"\" return value.tzinfo is not None and value.tzinfo.utcoffset(value) is not None def is_naive(value):", "trouble with this class, don't waste your time, just install pytz. \"\"\" def", "is None: timezone = get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available", "the value of settings.USE_TZ. This function is designed for use by the template", "pytz.timezone(settings.TIME_ZONE) else: _localtime = LocalTimezone() return _localtime # This function exists for consistency", "zone name, pytz is required. \"\"\" if isinstance(timezone, tzinfo): _active.value = timezone elif", "not is_naive(value) and getattr(value, 'convert_to_local_time', True)) return localtime(value) if should_convert else value #", "the time zone for the current thread. The ``timezone`` argument must be an", "return _localtime # This function exists for consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\"", "def __init__(self): # This code is moved in __init__ to execute it as", "is the time zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime", "exit. The ``timezone`` argument must be an instance of a ``tzinfo`` subclass, a", "This function exists for consistency with get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name", "get_current_timezone_name def get_default_timezone_name(): \"\"\" Returns the name of the default time zone. \"\"\"", "tzname(self, dt): return _time.tzname[self._isdst(dt)] def _isdst(self, dt): tt = (dt.year, dt.month, dt.day, dt.hour,", "__init__(self): # This code is moved in __init__ to execute it as late", "\"\"\" Returns the name of ``timezone``. \"\"\" try: # for pytz timezones return", "time zone defined by settings.TIME_ZONE. See also :func:`get_current_timezone`. \"\"\" global _localtime if _localtime", "zone as a tzinfo instance. \"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\"", "- self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def utcoffset(self, dt): if self._isdst(dt): return", "time zone aware. \"\"\" if hasattr(timezone, 'localize'): # available for pytz time zones", "return _get_timezone_name(get_current_timezone()) def _get_timezone_name(timezone): \"\"\" Returns the name of ``timezone``. 
\"\"\" try: #", "tzinfo instance.\"\"\" # In order to avoid accessing the settings at compile time,", "hasattr(timezone, 'normalize'): # available for pytz time zones value = timezone.normalize(value) return value", "is None or value.tzinfo.utcoffset(value) is None def make_aware(value, timezone): \"\"\" Makes a naive", "return timezone.localize(value, is_dst=None) else: # may be wrong around DST changes return value.replace(tzinfo=timezone)", "is not None and value.tzinfo.utcoffset(value) is not None def is_naive(value): \"\"\" Determines if", "time zone name, pytz is required. If it is ``None``, Django enables the", "change os.environ['TZ'] and call time.tzset() # because it isn't thread safe. def activate(timezone):", "or ``None``. If is it a time zone name, pytz is required. If", "import pytz except ImportError: pytz = None from django.conf import settings __all__ =", "of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active = local() def get_current_timezone():", "Returns the name of the default time zone. \"\"\" return _get_timezone_name(get_default_timezone()) _active =", "timezones return timezone.zone except AttributeError: # for regular tzinfo objects local_now = datetime.now(timezone)", "= self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self) def __repr__(self): return \"<LocalTimezone>\" def", "pytz is required. \"\"\" if isinstance(timezone, tzinfo): _active.value = timezone elif isinstance(timezone, basestring)", "return self.STDOFFSET def dst(self, dt): if self._isdst(dt): return self.DSTDIFF else: return ZERO def", "'convert_to_local_time', True)) return localtime(value) if should_convert else value # Utilities def localtime(value, timezone=None):", "class UTC(tzinfo): \"\"\" UTC implementation taken from Python's docs. Used only when pytz", "subclass, a time zone name, or ``None``. If is it a time zone", "timezone: %r\" % timezone) def deactivate(): \"\"\" Unsets the time zone for the", "Returns an aware or naive datetime.datetime, depending on settings.USE_TZ. \"\"\" if settings.USE_TZ: #", "regular tzinfo objects local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. #", "\"\"\" return getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the name of the", "is moved in __init__ to execute it as late as possible # See", "return timezone.tzname(local_now) # Timezone selection functions. # These functions don't change os.environ['TZ'] and", "selection functions. # These functions don't change os.environ['TZ'] and call time.tzset() # because", "from django.conf import settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override',", "elif isinstance(timezone, basestring) and pytz is not None: _active.value = pytz.timezone(timezone) else: raise", "a tzinfo instance. This is the time zone defined by settings.TIME_ZONE. 
See also", "if hasattr(_active, \"value\"): del _active.value class override(object): \"\"\" Temporarily set the time zone", "time is defined by the current time zone, unless another time zone is", "import time as _time try: import pytz except ImportError: pytz = None from", "'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive', ] # UTC and", "__all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate', 'override', 'is_naive', 'is_aware', 'make_aware', 'make_naive',", "None from django.conf import settings __all__ = [ 'utc', 'get_default_timezone', 'get_current_timezone', 'activate', 'deactivate',", "that datetime.now(tz=utc) is 24% slower return datetime.utcnow().replace(tzinfo=utc) else: return datetime.now() # By design,", "will force the value to be converted (or not), overriding the value of", "None. def is_aware(value): \"\"\" Determines if a given datetime.datetime is aware. The logic", "call time.tzset() # because it isn't thread safe. def activate(timezone): \"\"\" Sets the", "None: timezone = get_current_timezone() value = value.astimezone(timezone) if hasattr(timezone, 'normalize'): # available for", "else value # Utilities def localtime(value, timezone=None): \"\"\" Converts an aware datetime.datetime to", "def __init__(self, timezone): self.timezone = timezone self.old_timezone = getattr(_active, 'value', None) def __enter__(self):", "self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET tzinfo.__init__(self)", "zone name. If it is a time zone name, pytz is required. \"\"\"", "be converted (or not), overriding the value of settings.USE_TZ. This function is designed", "_time.daylight: self.DSTOFFSET = timedelta(seconds=-_time.altzone) else: self.DSTOFFSET = self.STDOFFSET self.DSTDIFF = self.DSTOFFSET - self.STDOFFSET", "\"\"\"UTC time zone as a tzinfo instance.\"\"\" # In order to avoid accessing", "on entry, and restores the previously active timezone on exit. The ``timezone`` argument", "objects local_now = datetime.now(timezone) return timezone.tzname(local_now) # Timezone selection functions. # These functions", "a context manager that uses ``~django.utils.timezone.activate()`` to set the timezone on entry, and", "_isdst(self, dt): tt = (dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second, dt.weekday(), 0, 0)", "is it a time zone name, pytz is required. If it is ``None``,", "their arguments. # The caller should ensure that they don't receive an invalid", "use_tz is None else use_tz) and not is_naive(value) and getattr(value, 'convert_to_local_time', True)) return", "else: del _active.value # Templates def template_localtime(value, use_tz=None): \"\"\" Checks if value is", "install pytz. \"\"\" def __init__(self): # This code is moved in __init__ to", "given time zone aware. \"\"\" if hasattr(timezone, 'localize'): # available for pytz time", "it's available and fallbacks when it isn't. \"\"\" from datetime import datetime, timedelta,", "fallbacks when it isn't. \"\"\" from datetime import datetime, timedelta, tzinfo from threading", "isn't available. \"\"\" def __repr__(self): return \"<UTC>\" def utcoffset(self, dt): return ZERO def", "time zone for the current thread. This is a context manager that uses", "getattr(_active, \"value\", get_default_timezone()) def get_current_timezone_name(): \"\"\" Returns the name of the currently active", "name, pytz is required. 
If it is ``None``, Django enables the default time", "because it isn't thread safe. def activate(timezone): \"\"\" Sets the time zone for", "``None``. If is it a time zone name, pytz is required. If it", "name, or ``None``. If is it a time zone name, pytz is required." ]
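To show how these helpers compose, here is a minimal sketch. It assumes a configured Django settings module with USE_TZ = True and an example TIME_ZONE, plus pytz installed (required when passing a zone name to override); the zone names are illustrative, not part of the module above.

# Sketch only: assumes Django settings are already configured with
# USE_TZ = True and an example TIME_ZONE, and that pytz is installed.
from django.utils import timezone

aware_utc = timezone.now()                    # aware UTC datetime when USE_TZ is True

with timezone.override("America/New_York"):   # temporarily switch the active time zone
    name = timezone.get_current_timezone_name()   # "America/New_York" inside the block
    local = timezone.localtime(aware_utc)         # converted to the active time zone

# Outside the block, the default zone from settings.TIME_ZONE is active again.
assert timezone.is_aware(local)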
[ "import processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names", "= resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download', 'esgsearch', 'thredds_download', 'workflow'", "resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download', 'esgsearch', 'thredds_download', 'workflow' ]", "Service from pywps.tests import assert_response_success from .common import client_for from malleefowl.processes import processes", "test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings'", "= client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) ==", "import Service from pywps.tests import assert_response_success from .common import client_for from malleefowl.processes import", "from pywps import Service from pywps.tests import assert_response_success from .common import client_for from", "pywps import Service from pywps.tests import assert_response_success from .common import client_for from malleefowl.processes", "pywps.tests import assert_response_success from .common import client_for from malleefowl.processes import processes def test_wps_caps():", "import assert_response_success from .common import client_for from malleefowl.processes import processes def test_wps_caps(): client", "client_for from malleefowl.processes import processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps',", "version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download', 'esgsearch',", "import client_for from malleefowl.processes import processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp =", "names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download', 'esgsearch', 'thredds_download',", "from malleefowl.processes import processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities',", "processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names =", "client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process'", "from .common import client_for from malleefowl.processes import processes def test_wps_caps(): client = client_for(Service(processes=processes))", "client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert", "<gh_stars>0 import pytest from pywps import Service from pywps.tests import assert_response_success from .common", "from pywps.tests import assert_response_success from .common import 
client_for from malleefowl.processes import processes def", "request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [ 'download',", "assert_response_success from .common import client_for from malleefowl.processes import processes def test_wps_caps(): client =", "def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities'", "client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split()) == [", "pytest from pywps import Service from pywps.tests import assert_response_success from .common import client_for", "malleefowl.processes import processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0')", "resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier') assert sorted(names.split())", "import pytest from pywps import Service from pywps.tests import assert_response_success from .common import", "= client_for(Service(processes=processes)) resp = client.get(service='wps', request='getcapabilities', version='1.0.0') names = resp.xpath_text('/wps:Capabilities' '/wps:ProcessOfferings' '/wps:Process' '/ows:Identifier')", ".common import client_for from malleefowl.processes import processes def test_wps_caps(): client = client_for(Service(processes=processes)) resp" ]
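As a hedged illustration of why assert_response_success is imported above, a hypothetical companion test might exercise DescribeProcess in the same style. The call pattern mirrors test_wps_caps and the 'download' identifier comes from the capabilities assertion; this test is not part of the original file, and the keyword arguments assume the same test-client behaviour as the GetCapabilities request above.

def test_wps_describeprocess_download():
    # Sketch only: reuses the client helpers from the test above.
    client = client_for(Service(processes=processes))
    resp = client.get(service='wps', request='describeprocess',
                      version='1.0.0', identifier='download')
    assert_response_success(resp)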
[ "- Production/Stable\", \"License :: OSI Approved :: MIT License\", \"Operating System :: MacOS\",", ":: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language :: Python ::", "\"Programming Language :: Python :: Implementation :: PyPy\", \"Topic :: Software Development ::", "Language :: Python :: 3.7\", \"Programming Language :: Python :: Implementation :: CPython\",", "Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"Development Status ::", "Language :: Python :: 2.7\", \"Programming Language :: Python :: 3.4\", \"Programming Language", ":: Implementation :: CPython\", \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic", "= f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\",", ":: Python :: Implementation :: PyPy\", \"Topic :: Software Development :: Libraries ::", "\"Environment :: Console\", \"Environment :: Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience", "setuptools import setup __author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__ =", "!=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for", "install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\",", "Language :: Python :: Implementation :: CPython\", \"Programming Language :: Python :: Implementation", "\"Programming Language :: Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming", "\"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming Language :: Python", "description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\",", "\"Programming Language :: Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming", ":: Developers\", \"Intended Audience :: Education\", \"Development Status :: 5 - Production/Stable\", \"License", "\"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\",", "f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\"", ":: 3.4\", \"Programming Language :: Python :: 3.5\", \"Programming Language :: Python ::", "\"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming Language ::", ":: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language :: Python ::", "License\", \"Operating System :: MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating System", "!=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python", "<NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description = f.read() setup(", 
":: Python :: 3.4\", \"Programming Language :: Python :: 3.5\", \"Programming Language ::", "long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming", "Audience :: Education\", \"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved", "[console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\",", ":: MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating", "\"Intended Audience :: Education\", \"Development Status :: 5 - Production/Stable\", \"License :: OSI", "\"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved :: MIT License\",", "], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\",", "for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"],", "Implementation :: PyPy\", \"Topic :: Software Development :: Libraries :: Python Modules\", \"Environment", "long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[", "!=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A", "python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\",", "Python :: 3.7\", \"Programming Language :: Python :: Implementation :: CPython\", \"Programming Language", "\"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"Development Status :: 5 -", ":: Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"Development Status", "Language :: Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language", "Language :: Python :: Implementation :: PyPy\", \"Topic :: Software Development :: Libraries", "\"Operating System :: Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating System ::", ":: Python :: 2.7\", \"Programming Language :: Python :: 3.4\", \"Programming Language ::", "MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating System", "setup __author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with", "3.7\", \"Programming Language :: Python :: Implementation :: CPython\", \"Programming Language :: Python", "\"Environment :: Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"Development", "\"Programming 
Language :: Python :: 2.7\", \"Programming Language :: Python :: 3.4\", \"Programming", "3.4\", \"Programming Language :: Python :: 3.5\", \"Programming Language :: Python :: 3.6\",", "Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language :: Python", "f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\",", "\"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\",", "\"Operating System :: MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating System ::", "import setup __author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\"", "\"1.4.0\" with open(\"README.md\", \"r\") as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"],", ":: Console\", \"Environment :: Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience ::", ":: Python Modules\", \"Environment :: Console\", \"Environment :: Web Environment\", \"Intended Audience ::", "\"Programming Language :: Python :: Implementation :: CPython\", \"Programming Language :: Python ::", "license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming Language :: Python ::", "CPython\", \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic :: Software Development", "2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description = f.read()", "\"Programming Language :: Python :: 3.4\", \"Programming Language :: Python :: 3.5\", \"Programming", "Status :: 5 - Production/Stable\", \"License :: OSI Approved :: MIT License\", \"Operating", "MIT License\", \"Operating System :: MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating", ":: PyPy\", \"Topic :: Software Development :: Libraries :: Python Modules\", \"Environment ::", "= \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__,", "3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language :: Python :: Implementation", "<filename>setup.py from setuptools import setup __author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\"", ":: Python :: 3.7\", \"Programming Language :: Python :: Implementation :: CPython\", \"Programming", "Implementation :: CPython\", \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic ::", ":: MIT License\", \"Operating System :: MacOS\", \"Operating System :: Microsoft :: Windows\",", "url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python", ":: 5 - Production/Stable\", \"License :: OSI Approved :: MIT License\", \"Operating System", "\"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming Language", "Python :: 3.4\", \"Programming Language :: Python :: 3.5\", \"Programming Language :: Python", "__author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" 
__version__ = \"1.4.0\" with open(\"README.md\",", "Audience :: Developers\", \"Intended Audience :: Education\", \"Development Status :: 5 - Production/Stable\",", "Libraries :: Python Modules\", \"Environment :: Console\", \"Environment :: Web Environment\", \"Intended Audience", "OSI Approved :: MIT License\", \"Operating System :: MacOS\", \"Operating System :: Microsoft", "Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\",", "packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run", ":: Software Development :: Libraries :: Python Modules\", \"Environment :: Console\", \"Environment ::", "setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ],", "\"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description =", ":: Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language ::", ":: Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language ::", "from setuptools import setup __author__ = \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__", "__copyright__ = \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f:", "Python Modules\", \"Environment :: Console\", \"Environment :: Web Environment\", \"Intended Audience :: Developers\",", "long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[", ":: Implementation :: PyPy\", \"Topic :: Software Development :: Libraries :: Python Modules\",", ":: Libraries :: Python Modules\", \"Environment :: Console\", \"Environment :: Web Environment\", \"Intended", "\"Topic :: Software Development :: Libraries :: Python Modules\", \"Environment :: Console\", \"Environment", ":: Education\", \"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved ::", "Production/Stable\", \"License :: OSI Approved :: MIT License\", \"Operating System :: MacOS\", \"Operating", "Python :: 2.7\", \"Programming Language :: Python :: 3.4\", \"Programming Language :: Python", "Python :: Implementation :: CPython\", \"Programming Language :: Python :: Implementation :: PyPy\",", "\"Programming Language :: Python :: 3.7\", \"Programming Language :: Python :: Implementation ::", "Approved :: MIT License\", \"Operating System :: MacOS\", \"Operating System :: Microsoft ::", "Console\", \"Environment :: Web Environment\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\",", "\"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming", "\"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper 
for Ngrok.\", long_description=long_description,", "Language :: Python :: 3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language", "\"License :: OSI Approved :: MIT License\", \"Operating System :: MacOS\", \"Operating System", "= \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description", "entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\",", "Python :: 3.6\", \"Programming Language :: Python :: 3.7\", \"Programming Language :: Python", "!=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts] ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper", "author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language", ":: 2.7\", \"Programming Language :: Python :: 3.4\", \"Programming Language :: Python ::", "keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python :: 2.7\",", "with open(\"README.md\", \"r\") as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7,", "wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\",", "Software Development :: Libraries :: Python Modules\", \"Environment :: Console\", \"Environment :: Web", "\"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\") as", "__version__ = \"1.4.0\" with open(\"README.md\", \"r\") as f: long_description = f.read() setup( name=\"pyngrok\",", "Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating System :: Unix\" ] )", "Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\",", ":: Python :: Implementation :: CPython\", \"Programming Language :: Python :: Implementation ::", "Python :: Implementation :: PyPy\", \"Topic :: Software Development :: Libraries :: Python", "System :: MacOS\", \"Operating System :: Microsoft :: Windows\", \"Operating System :: POSIX\",", "classifiers=[ \"Programming Language :: Python :: 2.7\", \"Programming Language :: Python :: 3.4\",", "author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language ::", "version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, 
!=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\" [console_scripts]", ":: CPython\", \"Programming Language :: Python :: Implementation :: PyPy\", \"Topic :: Software", "name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*\", install_requires=[ \"future\", \"pyyaml\" ], entry_points=\"\"\"", "5 - Production/Stable\", \"License :: OSI Approved :: MIT License\", \"Operating System ::", "\"r\") as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*,", "System :: Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating System :: Unix\"", ":: Microsoft :: Windows\", \"Operating System :: POSIX\", \"Operating System :: Unix\" ]", "= \"<NAME>\" __copyright__ = \"Copyright 2019, <NAME>\" __version__ = \"1.4.0\" with open(\"README.md\", \"r\")", "Education\", \"Development Status :: 5 - Production/Stable\", \"License :: OSI Approved :: MIT", "Language :: Python :: 3.4\", \"Programming Language :: Python :: 3.5\", \"Programming Language", "Developers\", \"Intended Audience :: Education\", \"Development Status :: 5 - Production/Stable\", \"License ::", ":: OSI Approved :: MIT License\", \"Operating System :: MacOS\", \"Operating System ::", "ngrok=pyngrok.ngrok:run \"\"\", description=\"A Python wrapper for Ngrok.\", long_description=long_description, long_description_content_type=\"text/markdown\", author=\"<NAME>\", author_email=\"<EMAIL>\", url=\"https://github.com/alexdlaird/pyngrok\", download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__),", "download_url=\"https://github.com/alexdlaird/pyngrok/archive/{}.tar.gz\".format(__version__), keywords=[\"ngrok\", \"tunnel\", \"tunneling\", \"webhook\", \"localhost\"], license=\"MIT\", classifiers=[ \"Programming Language :: Python ::", "2.7\", \"Programming Language :: Python :: 3.4\", \"Programming Language :: Python :: 3.5\",", "open(\"README.md\", \"r\") as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*,", "as f: long_description = f.read() setup( name=\"pyngrok\", version=__version__, packages=[\"pyngrok\"], python_requires=\">=2.7, !=3.0.*, !=3.1.*, !=3.2.*,", "PyPy\", \"Topic :: Software Development :: Libraries :: Python Modules\", \"Environment :: Console\",", "3.5\", \"Programming Language :: Python :: 3.6\", \"Programming Language :: Python :: 3.7\",", "Modules\", \"Environment :: Console\", \"Environment :: Web Environment\", \"Intended Audience :: Developers\", \"Intended", "Environment\", \"Intended Audience :: Developers\", \"Intended Audience :: Education\", \"Development Status :: 5", ":: 3.7\", \"Programming Language :: Python :: Implementation :: CPython\", \"Programming Language ::", "Development :: Libraries :: Python Modules\", \"Environment :: Console\", \"Environment :: Web Environment\"," ]
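The `entry_points` block above installs an `ngrok` console script mapped to `pyngrok.ngrok:run`. Below is a minimal, hedged sketch of programmatic use, assuming the package is installed and that pyngrok's `connect`/`disconnect` helpers behave as documented (return types have varied across releases; the port is illustrative):

# Usage sketch: open an HTTP tunnel to a local port and tear it down again.
from pyngrok import ngrok

public_url = ngrok.connect(8000)   # tunnel to http://localhost:8000
print(public_url)                  # publicly reachable ngrok address
ngrok.disconnect(public_url)       # close the tunnel when finished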
[ "\"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE,", "= \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train',", "command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks' ) def", "= serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import kfp.compiler", "\"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE,", "dsl import kfp.gcp as gcp # Pipeline input variables. KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION", "arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\",", "variables. KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\"", "tracks' ) def trackml(): train = train_op() serve = serve_op() serve.after(train) resultsgen =", "@dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks' ) def trackml(): train =", "description='A pipeline that predicts particle tracks' ) def trackml(): train = train_op() serve", "particle tracks' ) def trackml(): train = train_op() serve = serve_op() serve.after(train) resultsgen", "serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import kfp.compiler as compiler", ").apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml',", "image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks'", "name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret())", "= \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"],", ").apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks' ) def trackml(): train", "import kfp.gcp as gcp # Pipeline input variables. 
KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION =", "image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def", "TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[", "kfp.gcp as gcp # Pipeline input variables. KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\"", "name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle", "import kfp.dsl as dsl import kfp.gcp as gcp # Pipeline input variables. KUBECTL_IMAGE", ")#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\",", "Pipeline input variables. KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION", "KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp(", "train_op() serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ == '__main__':", "gcp # Pipeline input variables. KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE =", "that predicts particle tracks' ) def trackml(): train = train_op() serve = serve_op()", "#!/usr/bin/env python3 import kfp.dsl as dsl import kfp.gcp as gcp # Pipeline input", "resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import kfp.compiler as compiler compiler.Compiler().compile(trackml, __file__ +", "= train_op() serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ ==", "image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION),", "as gcp # Pipeline input variables. 
KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE", "\"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"],", "\"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return", "TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks' )", "def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op():", "def trackml(): train = train_op() serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve)", "trackml(): train = train_op() serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if", "dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ]", ") def trackml(): train = train_op() serve = serve_op() serve.after(train) resultsgen = resultsgen_op()", "TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret()", "arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return", "resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline", "command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\",", "def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply", "pipeline that predicts particle tracks' ) def trackml(): train = train_op() serve =", "resultsgen.after(serve) if __name__ == '__main__': import kfp.compiler as compiler compiler.Compiler().compile(trackml, __file__ + '.tar.gz')", "resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import kfp.compiler as compiler compiler.Compiler().compile(trackml,", "-f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"],", "input variables. 
KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION =", "serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f", "] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline(", "name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE,", "= \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op():", "arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts particle tracks' ) def trackml():", "name='trackml', description='A pipeline that predicts particle tracks' ) def trackml(): train = train_op()", "KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op():", "train = train_op() serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__", "\"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp(", "dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve',", "serve = serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import", "\"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen',", "as dsl import kfp.gcp as gcp # Pipeline input variables. KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\"", "TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION),", "= resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import kfp.compiler as compiler compiler.Compiler().compile(trackml, __file__", "kfp.dsl as dsl import kfp.gcp as gcp # Pipeline input variables. 
KUBECTL_IMAGE =", "\"--command\", \"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION),", ").apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\",", "train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return", "predicts particle tracks' ) def trackml(): train = train_op() serve = serve_op() serve.after(train)", "def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A", "serve_op() serve.after(train) resultsgen = resultsgen_op() resultsgen.after(serve) if __name__ == '__main__': import kfp.compiler as", "return dsl.ContainerOp( name='serve', image=\"{}:{}\".format(KUBECTL_IMAGE, KUBECTL_IMAGE_VERSION), arguments=[ \"/src/set_kubectl.sh\", \"--namespace\", \"kubeflow\", \"--command\", \"apply -f /src/k8s/serve.yaml\",", "return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that", "= \"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1)", "python3 import kfp.dsl as dsl import kfp.gcp as gcp # Pipeline input variables.", "\"apply -f /src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"],", "return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def serve_op(): return dsl.ContainerOp(", "KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\" TRACKML_IMAGE_VERSION = \"1\" def", "dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret()) @dsl.pipeline( name='trackml', description='A pipeline that predicts", "# Pipeline input variables. 
KUBECTL_IMAGE = \"gcr.io/mcas-195423/trackml_master_kfp_kubectl\" KUBECTL_IMAGE_VERSION = \"1\" TRACKML_IMAGE = \"gcr.io/mcas-195423/trackml_master_trackml\"", "\"1\" def train_op(): return dsl.ContainerOp( name='train', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"train.py\"], ).apply(gcp.use_gcp_secret() )#.set_gpu_limit(1) def", "/src/k8s/serve.yaml\", ] ).apply(gcp.use_gcp_secret()) def resultsgen_op(): return dsl.ContainerOp( name='resultsgen', image=\"{}:{}\".format(TRACKML_IMAGE, TRACKML_IMAGE_VERSION), command=[\"python\"], arguments=[\"resultsgen.py\"], ).apply(gcp.use_gcp_secret())" ]
[ "__name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name", "= infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,)", "= argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)') args =", "'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0", "<reponame>aleasoluciones/infrabbitmq<filename>bin/ticker.py # -*- coding: utf-8 -*- import time import puka import argparse import", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__': try: parser", "main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions = (RabbitMQError,", "TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event", "= 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1", "= 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins % 2", "network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins':", "0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins % 60 ==", "TICK_2_MINUTES, network, secs, mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins)", "puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network,", "import utils from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from", "2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins %", "True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if", "( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins):", "-*- coding: utf-8 -*- import time import puka import argparse import logging from", "secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs", "+= 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs % 60", "secs, mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network,", "TICK_5_MINUTES, network, secs, mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) 
def publish_event(publisher, event,", "% 60 == 0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)')", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs % 60 == 0:", "publisher, TICK_1_MINUTE, network, secs, mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins % 2 == 0:", "import puka import argparse import logging from infcommon import utils from infrabbitmq import", "publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs =", ") def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event,", "TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n',", "0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs", "publisher, TICK_1_SECOND, network, secs, mins) if secs % 60 == 0: mins +=", "rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins % 5 ==", "mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs,", "network, secs, mins) if secs % 60 == 0: mins += 1 secs", "args = parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as exc: logging.critical(\"Ticker Fails:", "infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import (", "-*- import time import puka import argparse import logging from infcommon import utils", "{}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer()", "network, secs, mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES,", "infcommon import utils from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError", "mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs +=", "coding: utf-8 -*- import time import puka import argparse import logging from infcommon", "0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins % 2 ==", "(ilo, c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as", "from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import", "publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) 
publisher.publish(event, network, data={'tick':", "logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network):", "TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs))", "1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs % 60 ==", "help='Network name (ilo, c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0] main(network) except", "infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, )", "= 0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1)", "publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__': try: parser =", "mins) if secs % 60 == 0: mins += 1 secs = 0", "secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs %", "secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins})", "c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as exc:", "event, network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs,", "publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {} {}\".format(event,", "publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs % 60 == 0: mins", "secs, mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network,", "% 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__", "if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins)", "'__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k,", "required=True, help='Network name (ilo, c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0] main(network)", "infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network,", "TICK_1_MINUTE, network, secs, mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs,", "...)') args = parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as exc: logging.critical(\"Ticker", "infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES,", "% 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, 
publisher, TICK_5_MINUTES, network, secs, mins) if mins", "def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions =", "publisher, TICK_2_MINUTES, network, secs, mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)') args = parser.parse_args() network", "publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError,", "if __name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network", "+= 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if", "mins) if __name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True,", "network, secs, mins) if __name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network',", "from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES,", "import time import puka import argparse import logging from infcommon import utils from", "60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__ ==", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins % 60 == 0:", "def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network,", "== '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo,", "name (ilo, c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0] main(network) except Exception", "publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "network, secs, mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES,", "parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)') args", "= parser.parse_args() network = args.network.split('-')[0] main(network) except Exception as exc: logging.critical(\"Ticker Fails: {}\".format(exc))", "TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins): logging.info(\"publish", "RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher,", "time import puka import argparse import logging from infcommon import utils from infrabbitmq", "utf-8 -*- import time import puka import argparse import logging from infcommon import", "network, data={'tick': secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0", "event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network): 
publisher", "if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins)", "publisher, TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__': try: parser = argparse.ArgumentParser()", "'--network', action='store', required=True, help='Network name (ilo, c2k, ...)') args = parser.parse_args() network =", "== 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins % 5", "infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while", "% 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins", "secs, mins) if secs % 60 == 0: mins += 1 secs =", "secs, mins) if mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network,", "publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "secs, mins) if __name__ == '__main__': try: parser = argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store',", "action='store', required=True, help='Network name (ilo, c2k, ...)') args = parser.parse_args() network = args.network.split('-')[0]", "== 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__':", "0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs,", "5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins %", "argparse.ArgumentParser() parser.add_argument('-n', '--network', action='store', required=True, help='Network name (ilo, c2k, ...)') args = parser.parse_args()", "= (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher,", "argparse import logging from infcommon import utils from infrabbitmq import factory as infrabbitmq_factory", "if secs % 60 == 0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "data={'tick': secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins", "utils from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names", "mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs,", "mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins = 0 rabbitmq_exceptions", "== 0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network,", "time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins) if secs", "secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins %", "mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, 
TICK_60_MINUTES, network, secs, mins) if", "import argparse import logging from infcommon import utils from infrabbitmq import factory as", "mins): logging.info(\"publish event {} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def", "publisher, TICK_5_MINUTES, network, secs, mins) if mins % 60 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins) if mins", "0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_60_MINUTES, network, secs, mins) if __name__ == '__main__': try:", "factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE,", "import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def", "secs = 0 mins = 0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True:", "import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND,", "as infrabbitmq_factory from infrabbitmq.rabbitmq import RabbitMQError from infrabbitmq.events_names import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES,", "60 == 0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE,", "from infcommon import utils from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq import", "mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if", "TICK_1_SECOND, network, secs, mins) if secs % 60 == 0: mins += 1", "secs, 'mins': mins}) def main(network): publisher = infrabbitmq_factory.event_publisher_json_serializer() secs = 0 mins =", "== 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if mins % 60", "secs % 60 == 0: mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event,", "import ( TICK_1_SECOND, TICK_1_MINUTE, TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs,", "import logging from infcommon import utils from infrabbitmq import factory as infrabbitmq_factory from", "KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs,", "mins += 1 secs = 0 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_MINUTE, network, secs, mins)", "mins % 5 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_5_MINUTES, network, secs, mins) if", "network, secs, mins) if mins % 2 == 0: utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES,", "0 rabbitmq_exceptions = (RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions,", "puka import argparse import logging from infcommon import utils from infrabbitmq import factory", "(RabbitMQError, puka.AMQPError, KeyError,) while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, 
publisher, TICK_1_SECOND,", "TICK_2_MINUTES, TICK_5_MINUTES, TICK_60_MINUTES, ) def publish_event(publisher, event, network, secs, mins): logging.info(\"publish event {}", "while True: time.sleep(1) secs += 1 utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_1_SECOND, network, secs, mins)", "{} {}\".format(event, secs)) publisher.publish(event, network, data={'tick': secs, 'mins': mins}) def main(network): publisher =", "# -*- coding: utf-8 -*- import time import puka import argparse import logging", "utils.do_stuff_with_exponential_backoff(rabbitmq_exceptions, publish_event, publisher, TICK_2_MINUTES, network, secs, mins) if mins % 5 == 0:", "logging from infcommon import utils from infrabbitmq import factory as infrabbitmq_factory from infrabbitmq.rabbitmq" ]
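`utils.do_stuff_with_exponential_backoff` comes from `infcommon` and is not shown here. As an illustration only (not the infcommon implementation), a retry helper with the same calling convention might look like this:

import time

def do_with_exponential_backoff(exceptions, func, *args, max_retries=5, base_delay=1):
    """Call func(*args), retrying with exponentially growing sleeps on the given exceptions."""
    for attempt in range(max_retries):
        try:
            return func(*args)
        except exceptions:
            if attempt == max_retries - 1:
                raise
            time.sleep(base_delay * (2 ** attempt))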
[ "Unless required by applicable law or agreed to in writing, software # distributed", "by a model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def", "To train the model, you need to first set it back in training", "use instead of a state dictionary loaded from saved weights file. This option", "prediction, we perform one forward pass through the encoder, and then perform several", "loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either:", "used to override said attribute with the supplied ``kwargs`` value. Remaining keys that", "vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\" # keyword arguments come", "are of the same family. If the name of or that path to", "`tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set", "method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder =", "the encoder if an argument named `encoder_hidden_state` is passed to this function. Params:", "``__init__`` function. You can specify kwargs sepcific for the encoder and decoder by", "(e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True and a", "``(batch_size, sequence_length)`` Indices of decoder input sequence tokens in the vocabulary. kwargs: (`optional`)", "override the cached versions if they exists. proxies: (`optional`) dict, default None: A", ":class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of an automatically loaded configuation.", "- the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the", "this case, ``from_tf`` should be set to True and a configuration object should", "- RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings -", "option can be used if you want to create a model from a", "the name of or that path to a pretrained model is specified the", "in the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\" # keyword", "decoder-specific (prefixed by `decoder_`) and those # that apply to the model as", "# Copyright 2018 The HuggingFace Inc. team. # # Licensed under the Apache", "def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: # We will create", "can specify kwargs sepcific for the encoder and decoder by prefixing the key", "} ) # Load and initialize the encoder and decoder # The distinction", "and error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments. Can be used", "the embedding weights. However the different model classes are inconsistent to that respect:", "a decoder from one or two base classes of the library from pre-trained", "encoder_outputs[ 0 ] # output the last layer hidden state else: encoder_outputs =", "containing missing keys, unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary of", "input sequence tokens in the vocabulary. 
decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices", "pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder)", "randomly initilized LSTM model as decoder if \"decoder_config\" not in kwargs: raise ValueError(", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "\" \" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\"))", "using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's parameters in two separate directories.", "the different model classes are inconsistent to that respect: - BertModel: embeddings.word_embeddings -", "cached versions if they exists. proxies: (`optional`) dict, default None: A dictionary of", "def save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq model and its configuration file in", "initilized LSTM model as decoder if \"decoder_config\" not in kwargs: raise ValueError( \"To", "decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates", "and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\")", "suppling a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json`", "with the `shortcut name` of a pre-trained model to load from cache or", "If the name of or that path to a pretrained model is specified", "*args, **kwargs): if ( \"bert\" not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or", "argument. This loading path is slower than converting the TensorFlow checkpoint in a", "the PyTorch model afterwards. model_args: (`optional`) Sequence of positional arguments: All remaning positional", "to use instead of an automatically loaded configuation. Configuration can be automatically loaded", "that we need to set correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder is", "using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url to a `tensorflow index", "be used to initialize a\" \" torch.nn.LSTM model as `decoder_config` keyword argument. 
\"", "been done) - If a configuration is not provided, ``kwargs`` will be first", "parameter (`decoder_model=lstm_model`), or\" \" - a dictionary of configuration parameters that will be", "instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use", "the encoder and decoder # The distinction between encoder and decoder at the", "in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if needed (training, first prediction", "be instantiated as a transformer architecture with one of the base model classes", "default False: Force to (re-)download the model weights and configuration files and override", "(training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs", "if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ]", "to any configuration attribute will be passed to the underlying model's ``__init__`` function.", "\"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model is currently supported.\") model", "{'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on each request. output_loading_info: (`optional`)", "and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed to both", "Tying the encoder and decoders' embeddings together. We need for each to get", "software # distributed under the License is distributed on an \"AS IS\" BASIS,", "positional arguments will be passed to the underlying model's ``__init__`` method config: (`optional`)", "``output_attention=True``). Behave differently depending on whether a `config` is provided or automatically loaded:", "] # output the last layer hidden state else: encoder_outputs = () #", "to a pretrained model is specified the encoder and the decoder will be", "@classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an encoder", "model classes are inconsistent to that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings", "and configuration files and override the cached versions if they exists. proxies: (`optional`)", "load from cache or download, e.g.: ``bert-base-uncased``. - a path to a `directory`", "class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of an", "that corresponds to a configuration attribute will be used to override said attribute", "sequence_length)`` Indices of decoder input sequence tokens in the vocabulary. kwargs: (`optional`) Remaining", "'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM, cls).from_pretrained(*args, **kwargs)", "using `model.eval()` (Dropout modules are deactivated) To train the model, you need to", "to also return a dictionnary containing missing keys, unexpected keys and error messages.", "kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for argument,", "model, but it is \"blocked\" by a model-specific keyword (bert, )... \"\"\" #", "the flag `is_decoder` that we need to set correctly. 
encoder = kwargs_encoder.pop(\"model\", None)", "possible to override this behavior and initialize, say, the decoder randomly by creating", "supply either: \" \" - a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\"", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "``./my_model_directory/encoder``. - a path or url to a `tensorflow index checkpoint file` (e.g.", "keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args,", "by the library (loaded with the ``shortcut-name`` string of a pretrained model), or", "one forward pass through the encoder, and then perform several forward passes with", "on whether a `config` is provided or automatically loaded: - If a configuration", "return model def save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq model and its configuration", "are used on each request. output_loading_info: (`optional`) boolean: Set to ``True`` to also", "value for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and", "Sequence of positional arguments: All remaning positional arguments will be passed to the", "not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]:", "come in 3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`)", "specific kwargs override the common ones in case of conflict. kwargs_common = {", "# that apply to the model as a whole. # We let the", "or\" \" - a dictionary of configuration parameters that will be used to", "conflict. kwargs_common = { argument: value for argument, value in kwargs.items() if not", "is \"blocked\" by a model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass", "relevant updates to the configuration have already been done) - If a configuration", "of the encoder and decoder are of the same family. If the name", "to update the configuration object (after it being loaded) and initiate the model.", "optional state dictionnary for the model to use instead of a state dictionary", "PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will be instantiated as", "a seq2eq depends what we are performing: - During training we perform one", "value for argument, value in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") }", "forward passes with the encoder's hidden state through the decoder to decode a", "class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where both of the encoder", "AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model", "for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if needed", "2018 The HuggingFace Inc. team. 
# # Licensed under the Apache License, Version", "please supply either: \" \" - a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`),", "\"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder = decoder", "# The distinction between encoder and decoder at the model level is made", "decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder, decoder)", "to the configuration have already been done) - If a configuration is not", "apply to the model as a whole. # We let the specific kwargs", "decoder; - During prediction, we perform one forward pass through the encoder, and", "The remaining kwargs will be passed to both encoders and decoders. Examples:: model", "Each key of ``kwargs`` that corresponds to a configuration attribute will be used", "a configuration JSON file named `config.json` is found in the directory. state_dict: (`optional`)", "an optional state dictionnary for the model to use instead of a state", "self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not", "model from a pretrained configuration but load your own weights. In this case", "or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert", "argument, value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value", "configuration parameters that will be used to initialize a\" \" torch.nn.LSTM model as", "from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: # We will create a", "of an automatically loaded configuation. Configuration can be automatically loaded when: - the", "Indices of encoder input sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape", "say, the decoder randomly by creating it beforehand as follows config = BertConfig.from_pretrained()", "`directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or", "): r\"\"\" Instantiates an encoder and a decoder from one or two base", "using the provided conversion scripts and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information", "a randomly initilized LSTM model as decoder if \"decoder_config\" not in kwargs: raise", "model = cls(encoder, decoder) return model def save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq", "initialize a\" \" torch.nn.LSTM model as `decoder_config` keyword argument. \" \" E.g. `decoder_config={'input_size':", "@classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not in pretrained_model_name_or_path or", "print_function, unicode_literals import logging import os import torch from torch import nn from", "and decoder; - During prediction, we perform one forward pass through the encoder,", "encoder and decoder at the model level is made # by the value", "argument.startswith(\"decoder_\") } ) # Load and initialize the encoder and decoder # The", "model using the provided conversion scripts and loading the PyTorch model afterwards. 
model_args:", "= self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] # output the last layer", "directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The", "not use this file except in compliance with the License. # You may", "the value of the flag `is_decoder` that we need to set correctly. encoder", "PyTorch model using the provided conversion scripts and loading the PyTorch model afterwards.", "XEmbedding layer for each model, but it is \"blocked\" by a model-specific keyword", "the encoder, and then perform several forward passes with the encoder's hidden state", "(prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`) and those # that apply", "directory. state_dict: (`optional`) dict: an optional state dictionnary for the model to use", "in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and initialize the encoder and", "suppling the save directory. - the model is loaded by suppling a local", "one of the base model classes of the library as encoder and (optionally)", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "modules are deactivated) To train the model, you need to first set it", "to first set it back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information", "arguments. \"\"\" # keyword arguments come in 3 flavors: encoder-specific (prefixed by #", "encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args,", "to a configuration attribute will be used to override said attribute with the", "a transformer architecture with one of the base model classes of the library", "`directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or", "tokens in the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\" #", "agreed to in writing, software # distributed under the License is distributed on", "state_dict: (`optional`) dict: an optional state dictionnary for the model to use instead", "argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and initialize the", "*args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: # We will create a randomly", "are deactivated) To train the model, you need to first set it back", "is None: # We will create a randomly initilized LSTM model as decoder", "model is specified the encoder and the decoder will be initialized with the", "want to create a model from a pretrained configuration but load your own", "also return a dictionnary containing missing keys, unexpected keys and error messages. kwargs:", "``(batch_size, sequence_length)`` Indices of encoder input sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor``", "*model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None) if decoder is", "weights are not present). 
It is possible to override this behavior and initialize,", "kwargs.get(\"decoder_model\", None) is None: # We will create a randomly initilized LSTM model", "be initialized with the pretrained weight (the cross-attention will be intialized randomly if", "dictionnary for the model to use instead of a state dictionary loaded from", "input sequence tokens in the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments.", "not present). It is possible to override this behavior and initialize, say, the", "decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs):", "configuration but load your own weights. In this case though, you should check", "as follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\"", "loaded: - If a configuration is provided with ``config``, ``**kwargs`` will be directly", "check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir: (`optional`)", "Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where both of the encoder and", "instead of an automatically loaded configuation. Configuration can be automatically loaded when: -", "be passed to the underlying model's ``__init__`` function. You can specify kwargs sepcific", "forward pass on a seq2eq depends what we are performing: - During training", "'http://hostname': 'foo.bar:4012'}. The proxies are used on each request. output_loading_info: (`optional`) boolean: Set", "model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or url to", "saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url to a `tensorflow", "output the last layer hidden state else: encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"]", "): raise ValueError(\"Only the Bert model is currently supported.\") model = super(Model2Model, cls).from_pretrained(", "ValueError( \"To load an LSTM in Encoder-Decoder model, please supply either: \" \"", "of encoder input sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size,", "with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder", "to in writing, software # distributed under the License is distributed on an", "= super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod", "instantiated as a transformer architecture with one of the base model classes of", "default None: A dictionary of proxy servers to use by protocol or endpoint,", "implied. # See the License for the specific language governing permissions and #", "from __future__ import absolute_import, division, print_function, unicode_literals import logging import os import torch", "not a simpler option. 
cache_dir: (`optional`) string: Path to a directory in which", "override said attribute with the supplied ``kwargs`` value. Remaining keys that do not", "``__init__`` method (we assume all relevant updates to the configuration have already been", "or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model is currently supported.\")", "argument[len(\"encoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update(", "create a model from a pretrained configuration but load your own weights. In", "HuggingFace Inc. team. # # Licensed under the Apache License, Version 2.0 (the", "attribute with the supplied ``kwargs`` value. Remaining keys that do not correspond to", "case, ``from_tf`` should be set to True and a configuration object should be", "**kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\",", "pass on a seq2eq depends what we are performing: - During training we", "pre-trained model configuration should be cached if the standard cache should not be", ") # Encode if needed (training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None)", "configuration attribute will be used to override said attribute with the supplied ``kwargs``", "Apache License, Version 2.0 (the \"License\"); # you may not use this file", "specific language governing permissions and # limitations under the License. \"\"\" Classes to", "Classes to support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division, print_function, unicode_literals", "In this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is", "or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case,", "value of the flag `is_decoder` that we need to set correctly. encoder =", "on a seq2eq depends what we are performing: - During training we perform", "encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\",", "TensorFlow checkpoint in a PyTorch model using the provided conversion scripts and loading", "need for each to get down to the embedding weights. However the different", "# See the License for the specific language governing permissions and # limitations", "the License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "in a format such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save", "it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on", "(`decoder_model=lstm_model`), or\" \" - a dictionary of configuration parameters that will be used", "be used. force_download: (`optional`) boolean, default False: Force to (re-)download the model weights", "initialize, say, the decoder randomly by creating it beforehand as follows config =", "the same family. If the name of or that path to a pretrained", "any configuration attribute will be passed to the underlying model's ``__init__`` function. You", "the Apache License, Version 2.0 (the \"License\"); # you may not use this", "you may not use this file except in compliance with the License. #", "- a path or url to a `tensorflow index checkpoint file` (e.g. 
`./tf_model/model.ckpt.index`).", "need to set correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder is None: encoder", "Set to ``True`` to also return a dictionnary containing missing keys, unexpected keys", "pretrained configuration but load your own weights. In this case though, you should", "(prefixed by `decoder_`) and those # that apply to the model as a", "use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are", "`:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's parameters in two separate directories. \"\"\"", "automatically loaded configuation. Configuration can be automatically loaded when: - the model is", "Copyright 2018 The HuggingFace Inc. team. # # Licensed under the Apache License,", "( \"bert\" not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path", "from torch import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class", "files and override the cached versions if they exists. proxies: (`optional`) dict, default", "configuration is provided with ``config``, ``**kwargs`` will be directly passed to the underlying", "from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of an automatically loaded", "to use instead of a state dictionary loaded from saved weights file. This", "model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path,", "and (optionally) another one as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method.", "\"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass on", "# Licensed under the Apache License, Version 2.0 (the \"License\"); # you may", "decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either: - a string with the", "several forward passes with the encoder's hidden state through the decoder to decode", "(e.g. ``output_attention=True``). Behave differently depending on whether a `config` is provided or automatically", "You can specify kwargs sepcific for the encoder and decoder by prefixing the", "own weights. In this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and", "model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword arguments come in", "roberta.embeddings.word_embeddings argument of the XEmbedding layer for each model, but it is \"blocked\"", "model's ``__init__`` method (we assume all relevant updates to the configuration have already", "provided by the library (loaded with the ``shortcut-name`` string of a pretrained model),", "the provided conversion scripts and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary", "encoder self.decoder = decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ):", "the configuration object (after it being loaded) and initiate the model. (e.g. ``output_attention=True``).", ")... 
\"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if", "This option can be used if you want to create a model from", "Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: # We", "= encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return", "passed to both encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize", "= Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def", "protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on", "a configuration is provided with ``config``, ``**kwargs`` will be directly passed to the", "We will create a randomly initilized LSTM model as decoder if \"decoder_config\" not", "weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or url to a", "of or that path to a pretrained model is specified the encoder and", "= BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args,", "loading path is slower than converting the TensorFlow checkpoint in a PyTorch model", "if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy()", "encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass on a seq2eq depends what we", "need to first set it back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path:", "argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if needed (training,", ") kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"decoder_\")", "by suppling the save directory. - the model is loaded by suppling a", "directory in which a downloaded pre-trained model configuration should be cached if the", "**kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None) if decoder is None:", "to this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder", "last layer hidden state else: encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states", "*args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder and", "the library as encoder and (optionally) another one as decoder when created with", "encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs", "*model_args, **kwargs ): r\"\"\" Instantiates an encoder and a decoder from one or", "two base classes of the library from pre-trained model checkpoints. The model is", "should be cached if the standard cache should not be used. 
force_download: (`optional`)", "@classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: # We will", "- a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.:", "self.encoder = encoder self.decoder = decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args,", "Either: - a string with the `shortcut name` of a pre-trained model to", ":class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will be instantiated as a transformer", "layer hidden state else: encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"]", "self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass", "string with the `shortcut name` of a pre-trained model to load from cache", "PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword arguments come in 3 flavors:", "# keyword arguments come in 3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific", "model, please supply either: \" \" - a torch.nn.LSTM model as `decoder_model` parameter", "created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__()", "`encoder_hidden_state` is passed to this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``", "encoder_hidden_states = encoder_outputs[ 0 ] # output the last layer hidden state else:", "permissions and # limitations under the License. \"\"\" Classes to support Encoder-Decoder architectures", "**kwargs ): r\"\"\" Instantiates an encoder and a decoder from one or two", "a downloaded pre-trained model configuration should be cached if the standard cache should", "the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save", "not provided, ``kwargs`` will be first passed to the configuration class initialization function", "to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should", "``**kwargs`` will be directly passed to the underlying model's ``__init__`` method (we assume", "information necessary to initiate the encoder. Either: - a string with the `shortcut", "is a generic model class that will be instantiated as a transformer architecture", "encoder input sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)``", "reloaded by suppling the save directory. - the model is loaded by suppling", "model's ``__init__`` function. You can specify kwargs sepcific for the encoder and decoder", "remaining kwargs will be passed to both encoders and decoders. Examples:: model =", "model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None:", "to get down to the embedding weights. 
However the different model classes are", "} kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for", "forward pass on the encoder if an argument named `encoder_hidden_state` is passed to", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "deactivated) To train the model, you need to first set it back in", "a configuration object should be provided as ``config`` argument. This loading path is", "and a configuration object should be provided as ``config`` argument. This loading path", "provided with ``config``, ``**kwargs`` will be directly passed to the underlying model's ``__init__``", "decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder):", "both of the encoder and decoder are of the same family. If the", "cache should not be used. force_download: (`optional`) boolean, default False: Force to (re-)download", "directly passed to the underlying model's ``__init__`` method (we assume all relevant updates", "will be instantiated as a transformer architecture with one of the base model", "None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder =", "load an LSTM in Encoder-Decoder model, please supply either: \" \" - a", "encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either: - a string with the", "of keyword arguments. Can be used to update the configuration object (after it", "boolean: Set to ``True`` to also return a dictionnary containing missing keys, unexpected", "(:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to a configuration attribute will be", "the common ones in case of conflict. kwargs_common = { argument: value for", "path or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this", "See the License for the specific language governing permissions and # limitations under", "already been done) - If a configuration is not provided, ``kwargs`` will be", "(after it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending", "pass through the encoder, and then perform several forward passes with the encoder's", "the model to use instead of a state dictionary loaded from saved weights", "decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model where both", "done) - If a configuration is not provided, ``kwargs`` will be first passed", "``__init__`` method config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for", "of keyword arguments. \"\"\" # keyword arguments come in 3 flavors: encoder-specific (prefixed", "kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder =", "We need for each to get down to the embedding weights. 
However the", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in", "it beforehand as follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased',", "model), or - the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by", "RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM:", "directory. - the model is loaded by suppling a local directory as ``pretrained_model_name_or_path``", "model afterwards. model_args: (`optional`) Sequence of positional arguments: All remaning positional arguments will", "def tie_weights(self): \"\"\" Tying the encoder and decoders' embeddings together. We need for", "the provided conversion scripts and loading the PyTorch model afterwards. model_args: (`optional`) Sequence", "# limitations under the License. \"\"\" Classes to support Encoder-Decoder architectures \"\"\" from", "**kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2 model", "the pretrained weight (the cross-attention will be intialized randomly if its weights are", "let the specific kwargs override the common ones in case of conflict. kwargs_common", "are performing: - During training we perform one forward pass through both the", "error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments. Can be used to", "assume all relevant updates to the configuration have already been done) - If", "decoder's parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self,", "**kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder and decoders' embeddings together. We", "at the model level is made # by the value of the flag", "an LSTM in Encoder-Decoder model, please supply either: \" \" - a torch.nn.LSTM", "\" torch.nn.LSTM model as `decoder_config` keyword argument. \" \" E.g. `decoder_config={'input_size': 768, 'hidden_size':", "# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "(`optional`) dict, default None: A dictionary of proxy servers to use by protocol", "was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory. -", "file. This option can be used if you want to create a model", "if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir: (`optional`) string:", "in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder.", "state dictionary loaded from saved weights file. 
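# Usage sketch (illustrative, not part of the original module). During generation the
# encoder only needs to run once; passing `encoder_hidden_states=...` reaches the
# encoder-side kwargs as `hidden_states` (the `encoder_` prefix is stripped in `forward`)
# and makes the encoder pass above short-circuit:
#
#     model = PreTrainedEncoderDecoder.from_pretrained("bert-base-uncased", "bert-base-uncased")
#     outputs = model(encoder_input_ids, decoder_input_ids)          # full pass (training)
#     hidden = model.encoder(encoder_input_ids)[0]                   # encode once for decoding
#     step_outputs = model(encoder_input_ids, decoder_input_ids,
#                          encoder_hidden_states=hidden)             # encoder pass is skipped
#     model.save_pretrained("./my_seq2seq")  # writes ./my_seq2seq/encoder and ./my_seq2seq/decoder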
class Model2Model(PreTrainedEncoderDecoder):
    r"""
    :class:`~transformers.Model2Model` instantiates a Seq2Seq model where both the encoder and
    the decoder are of the same family. If the name of or the path to a pretrained model is
    specified, the encoder and the decoder will be initialized with the pretrained weights (the
    cross-attention will be initialized randomly if its weights are not present).

    It is possible to override this behavior and initialize, say, the decoder randomly by
    creating it beforehand as follows::

        config = BertConfig.from_pretrained()
        decoder = BertForMaskedLM(config)
        model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder)
    """

    def __init__(self, *args, **kwargs):
        super(Model2Model, self).__init__(*args, **kwargs)
        self.tie_weights()

    def tie_weights(self):
        """Tie the encoder's and decoder's embeddings together.

        We need for each to get down to the embedding weights. However, the different model
        classes are inconsistent in that respect:

        - BertModel: embeddings.word_embeddings
        - RoBERTa: embeddings.word_embeddings
        - XLMModel: embeddings
        - GPT2: wte
        - BertForMaskedLM: bert.embeddings.word_embeddings
        - RobertaForMaskedLM: roberta.embeddings.word_embeddings

        argument of the XEmbedding layer for each model, but it is "blocked" by a
        model-specific keyword (bert, )...
        """
        # self._tie_or_clone_weights(self.encoder, self.decoder)
        pass

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs):
        if (
            "bert" not in pretrained_model_name_or_path
            or "roberta" in pretrained_model_name_or_path
            or "distilbert" in pretrained_model_name_or_path
        ):
            raise ValueError("Only the Bert model is currently supported.")

        model = super(Model2Model, cls).from_pretrained(
            encoder_pretrained_model_name_or_path=pretrained_model_name_or_path,
            decoder_pretrained_model_name_or_path=pretrained_model_name_or_path,
            *args,
            **kwargs
        )
        return model


class Model2LSTM(PreTrainedEncoderDecoder):
    @classmethod
    def from_pretrained(cls, *args, **kwargs):
        if kwargs.get("decoder_model", None) is None:
            # We will create a randomly initialized LSTM model as decoder.
            if "decoder_config" not in kwargs:
                raise ValueError(
                    "To load an LSTM in Encoder-Decoder model, please supply either: "
                    " - a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or"
                    " - a dictionary of configuration parameters that will be used to initialize a"
                    "   torch.nn.LSTM model as `decoder_config` keyword argument. "
                    "   E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`"
                )
            # Unpack the configuration dict into nn.LSTM keyword arguments (nn.LSTM's first
            # positional argument is the integer input size, so the dict cannot be passed as-is).
            kwargs["decoder_model"] = torch.nn.LSTM(**kwargs.pop("decoder_config"))
        model = super(Model2LSTM, cls).from_pretrained(*args, **kwargs)
        return model
force_download: (`optional`) boolean,", "The forward pass on a seq2eq depends what we are performing: - During", "pass on the encoder if an argument named `encoder_hidden_state` is passed to this", "case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a", "def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass on a seq2eq depends", "common ones in case of conflict. kwargs_common = { argument: value for argument,", "have already been done) - If a configuration is not provided, ``kwargs`` will", "Licensed under the Apache License, Version 2.0 (the \"License\"); # you may not", "the encoder and decoder by prefixing the key with `encoder_` and `decoder_` respectively.", "same family. If the name of or that path to a pretrained model", "model. (e.g. ``output_attention=True``). Behave differently depending on whether a `config` is provided or", "architectures \"\"\" from __future__ import absolute_import, division, print_function, unicode_literals import logging import os", "a Seq2Seq2 model where both of the encoder and decoder are of the", "value in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy()", "named `encoder_hidden_state` is passed to this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size,", "self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not in", "the configuration have already been done) - If a configuration is not provided,", "a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path", "``True`` to also return a dictionnary containing missing keys, unexpected keys and error", "different model classes are inconsistent to that respect: - BertModel: embeddings.word_embeddings - RoBERTa:", "the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder`", "the XEmbedding layer for each model, but it is \"blocked\" by a model-specific", "if argument.startswith(\"decoder_\") } ) # Encode if needed (training, first prediction pass) encoder_hidden_states", "or download, e.g.: ``bert-base-uncased``. - a path to a `directory` containing model weights", "It is possible to override this behavior and initialize, say, the decoder randomly", "whole. # We let the specific kwargs override the common ones in case", "with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed", "for argument, value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]:", "**kwargs): if kwargs.get(\"decoder_model\", None) is None: # We will create a randomly initilized", "as decoder if \"decoder_config\" not in kwargs: raise ValueError( \"To load an LSTM", "classes of the library as encoder and (optionally) another one as decoder when", "the supplied ``kwargs`` value. 
Remaining keys that do not correspond to any configuration", "def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder = decoder @classmethod", "will create a randomly initilized LSTM model as decoder if \"decoder_config\" not in", "from one or two base classes of the library from pre-trained model checkpoints.", "that will be used to initialize a\" \" torch.nn.LSTM model as `decoder_config` keyword", "initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to a configuration attribute", "to the underlying model's ``__init__`` function. You can specify kwargs sepcific for the", "if encoder is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder =", "We save the encoder' and decoder's parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory,", "GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer", "encoder, and then perform several forward passes with the encoder's hidden state through", "dict, default None: A dictionary of proxy servers to use by protocol or", "keys and error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments. Can be", ":]: value for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode", "for each model, but it is \"blocked\" by a model-specific keyword (bert, )...", "the License for the specific language governing permissions and # limitations under the", "as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder,", "value for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if", "model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a dictionary of configuration parameters", "intialized randomly if its weights are not present). It is possible to override", "keys, unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments.", "is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return", "flag `is_decoder` that we need to set correctly. encoder = kwargs_encoder.pop(\"model\", None) if", "either: \" \" - a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \"", "dictionary loaded from saved weights file. This option can be used if you", "- the model is loaded by suppling a local directory as ``pretrained_model_name_or_path`` and", "perform several forward passes with the encoder's hidden state through the decoder to", "it back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate", "a\" \" torch.nn.LSTM model as `decoder_config` keyword argument. \" \" E.g. `decoder_config={'input_size': 768,", "with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either: - a", "and decoder by prefixing the key with `encoder_` and `decoder_` respectively. (e.g. 
``decoder_output_attention=True``).", "**kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder, decoder) return model def save_pretrained(self,", "decoder. Either: - a string with the `shortcut name` of a pre-trained model", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #", "of the library from pre-trained model checkpoints. The model is set in evaluation", "automatically loaded when: - the model is a model provided by the library", "will be passed to the underlying model's ``__init__`` method config: (`optional`) instance of", "correspond to any configuration attribute will be passed to the underlying model's ``__init__``", "configuration JSON file named `config.json` is found in the directory. state_dict: (`optional`) dict:", "then perform several forward passes with the encoder's hidden state through the decoder", "Encode if needed (training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states", "- the model is a model provided by the library (loaded with the", "differently depending on whether a `config` is provided or automatically loaded: - If", "as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a dictionary of configuration parameters that", "a path or url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In", "and decoder's parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def", "encoder and decoder are of the same family. If the name of or", "E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model =", "argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( {", "``shortcut-name`` string of a pretrained model), or - the model was saved using", "a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``.", "if argument.startswith(\"decoder_\") } ) # Load and initialize the encoder and decoder #", "inconsistent to that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings", "} ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value in kwargs.items() if", "and then perform several forward passes with the encoder's hidden state through the", "(`optional`) boolean: Set to ``True`` to also return a dictionnary containing missing keys,", "Version 2.0 (the \"License\"); # you may not use this file except in", "None) is None: # We will create a randomly initilized LSTM model as", "\"\"\" The forward pass on a seq2eq depends what we are performing: -", "is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder", "Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids,", "e.g.: ``./my_model_directory/decoder``. 
- a path or url to a `tensorflow index checkpoint file`", "on the encoder if an argument named `encoder_hidden_state` is passed to this function.", "each request. output_loading_info: (`optional`) boolean: Set to ``True`` to also return a dictionnary", "() # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs", ") encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None) if decoder is None: decoder", "0 ] # output the last layer hidden state else: encoder_outputs = ()", "cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls,", "to support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division, print_function, unicode_literals import", "a generic model class that will be instantiated as a transformer architecture with", "to load from cache or download, e.g.: ``bert-base-uncased``. - a path to a", "keyword arguments come in 3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed", "a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a dictionary of", "default using `model.eval()` (Dropout modules are deactivated) To train the model, you need", "will be used to override said attribute with the supplied ``kwargs`` value. Remaining", "as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the", "argument: value for argument, value in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\")", "# self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\"", "decoder will be initialized with the pretrained weight (the cross-attention will be intialized", "weights. In this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained`", "config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self,", "sequence tokens in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of", "if kwargs.get(\"decoder_model\", None) is None: # We will create a randomly initilized LSTM", "instantiates a Seq2Seq2 model where both of the encoder and decoder are of", "though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler", "AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder, decoder) return model", "the model to use instead of an automatically loaded configuation. Configuration can be", "configuration attribute will be passed to the underlying model's ``__init__`` function. 
You can", "what we are performing: - During training we perform one forward pass through", "decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of decoder input sequence tokens in", "(`optional`) Remaining dictionary of keyword arguments. Can be used to update the configuration", "in case of conflict. kwargs_common = { argument: value for argument, value in", "as whole. # We let the specific kwargs override the common ones in", "provided as ``config`` argument. This loading path is slower than converting the TensorFlow", "cross-attention will be intialized randomly if its weights are not present). It is", "prefixing the key with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs", "shape ``(batch_size, sequence_length)`` Indices of decoder input sequence tokens in the vocabulary. kwargs:", "`AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder", "import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\"", "encoder and decoder; - During prediction, we perform one forward pass through the", "model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either: - a string", "= encoder self.decoder = decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs", "of the flag `is_decoder` that we need to set correctly. encoder = kwargs_encoder.pop(\"model\",", "that do not correspond to any configuration attribute will be passed to the", "model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory.", "initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether a `config` is", "follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def", "will be used to initialize a\" \" torch.nn.LSTM model as `decoder_config` keyword argument.", "Seq2Seq2 model where both of the encoder and decoder are of the same", "AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class", "as ``config`` argument. This loading path is slower than converting the TensorFlow checkpoint", "tie_weights(self): \"\"\" Tying the encoder and decoders' embeddings together. We need for each", "class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is None: #", "e.g.: ``bert-base-uncased``. 
- a path to a `directory` containing model weights saved using", "self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a Seq2Seq2", "loaded by suppling a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file", "kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value", "\"decoder_config\" not in kwargs: raise ValueError( \"To load an LSTM in Encoder-Decoder model,", "keyword arguments. \"\"\" # keyword arguments come in 3 flavors: encoder-specific (prefixed by", "OF ANY KIND, either express or implied. # See the License for the", "the model level is made # by the value of the flag `is_decoder`", "to set correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder is None: encoder =", "model's ``__init__`` method config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration", "\"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass on a seq2eq", "be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's parameters in two", "model and its configuration file in a format such that it can be", "the specific kwargs override the common ones in case of conflict. kwargs_common =", "configuation. Configuration can be automatically loaded when: - the model is a model", "afterwards. model_args: (`optional`) Sequence of positional arguments: All remaning positional arguments will be", "encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states", "in a PyTorch model using the provided conversion scripts and loading the PyTorch", "in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids,", "the model, you need to first set it back in training mode with", ":]: value for argument, value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( {", "embeddings together. We need for each to get down to the embedding weights.", "is made # by the value of the flag `is_decoder` that we need", "argument, value in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder =", "Configuration for the model to use instead of an automatically loaded configuation. Configuration", "**kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] # output the last layer hidden state", "the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\" # keyword arguments", "downloaded pre-trained model configuration should be cached if the standard cache should not", "model is loaded by suppling a local directory as ``pretrained_model_name_or_path`` and a configuration", "to True and a configuration object should be provided as ``config`` argument. This", "(`optional`) Sequence of positional arguments: All remaning positional arguments will be passed to", "the forward pass on the encoder if an argument named `encoder_hidden_state` is passed", "the decoder. 
Either: - a string with the `shortcut name` of a pre-trained", "from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an encoder and a", "- During prediction, we perform one forward pass through the encoder, and then", "cache or download, e.g.: ``bert-base-uncased``. - a path to a `directory` containing model", "Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword arguments come", "return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None) is", ") # Load and initialize the encoder and decoder # The distinction between", "LSTM model as decoder if \"decoder_config\" not in kwargs: raise ValueError( \"To load", "`config` is provided or automatically loaded: - If a configuration is provided with", "configuration object should be provided as ``config`` argument. This loading path is slower", "{ argument[len(\"decoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } )", "`decoder_config` keyword argument. \" \" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" )", "None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder,", "used on each request. output_loading_info: (`optional`) boolean: Set to ``True`` to also return", "be used if you want to create a model from a pretrained configuration", "Load and initialize the encoder and decoder # The distinction between encoder and", "or agreed to in writing, software # distributed under the License is distributed", "torch import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module):", "the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder =", "encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None", "absolute_import, division, print_function, unicode_literals import logging import os import torch from torch import", "decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model", "weights and configuration files and override the cached versions if they exists. proxies:", "- RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer for each model, but it", ":func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. 
cache_dir: (`optional`) string: Path to", "will be directly passed to the underlying model's ``__init__`` method (we assume all", "name` of a pre-trained model to load from cache or download, e.g.: ``bert-base-uncased``.", "prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids,", "under the Apache License, Version 2.0 (the \"License\"); # you may not use", "skip the forward pass on the encoder if an argument named `encoder_hidden_state` is", "(`optional`) boolean, default False: Force to (re-)download the model weights and configuration files", "model as `decoder_config` keyword argument. \" \" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers':", "During training we perform one forward pass through both the encoder and decoder;", "format such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder'", "of shape ``(batch_size, sequence_length)`` Indices of encoder input sequence tokens in the vocabulary.", "if \"decoder_config\" not in kwargs: raise ValueError( \"To load an LSTM in Encoder-Decoder", "underlying model's ``__init__`` method config: (`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`:", "License. # You may obtain a copy of the License at # #", "index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to", "kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if needed (training, first prediction pass)", "string of a pretrained model), or - the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained`", "the standard cache should not be used. force_download: (`optional`) boolean, default False: Force", "a Seq2Seq model and its configuration file in a format such that it", "in which a downloaded pre-trained model configuration should be cached if the standard", "Behave differently depending on whether a `config` is provided or automatically loaded: -", "encoder and (optionally) another one as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder input sequence tokens", "arguments: All remaning positional arguments will be passed to the underlying model's ``__init__``", "A dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http':", "support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division, print_function, unicode_literals import logging", "During prediction, we perform one forward pass through the encoder, and then perform", "= PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword arguments come in 3", "together. We need for each to get down to the embedding weights. However", "encoder's hidden state through the decoder to decode a full sequence. 
Therefore, we", "not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise", "model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder):", "with the pretrained weight (the cross-attention will be intialized randomly if its weights", "XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of", "License, Version 2.0 (the \"License\"); # you may not use this file except", "class that will be instantiated as a transformer architecture with one of the", "is possible to override this behavior and initialize, say, the decoder randomly by", "# initialize Bert2Bert \"\"\" # keyword arguments come in 3 flavors: encoder-specific (prefixed", "BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte - BertForMaskedLM:", ") return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if kwargs.get(\"decoder_model\", None)", "kwargs: (`optional`) Remaining dictionary of keyword arguments. Can be used to update the", "but it is \"blocked\" by a model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder,", "However the different model classes are inconsistent to that respect: - BertModel: embeddings.word_embeddings", "of conflict. kwargs_common = { argument: value for argument, value in kwargs.items() if", "is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] # output", "def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the", "(`optional`) Remaining dictionary of keyword arguments. \"\"\" # keyword arguments come in 3", "logging import os import torch from torch import nn from .modeling_auto import AutoModel,", "encoder and decoder # The distinction between encoder and decoder at the model", "both the encoder and decoder; - During prediction, we perform one forward pass", "and decoders' embeddings together. We need for each to get down to the", "dictionary of keyword arguments. Can be used to update the configuration object (after", "underlying model's ``__init__`` method (we assume all relevant updates to the configuration have", "raise ValueError(\"Only the Bert model is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path,", "be intialized randomly if its weights are not present). It is possible to", "of the base model classes of the library as encoder and (optionally) another", "decoder by prefixing the key with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The", "(the cross-attention will be intialized randomly if its weights are not present). It", "or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. 
The proxies are used on each", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's", "when: - the model is a model provided by the library (loaded with", "model to load from cache or download, e.g.: ``bert-base-uncased``. - a path to", "to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a", "in the vocabulary. decoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of decoder input", "model to use instead of an automatically loaded configuation. Configuration can be automatically", "checkpoint in a PyTorch model using the provided conversion scripts and loading the", "the specific language governing permissions and # limitations under the License. \"\"\" Classes", ".modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a", "(`optional`) string: Path to a directory in which a downloaded pre-trained model configuration", "updates to the configuration have already been done) - If a configuration is", "a model from a pretrained configuration but load your own weights. In this", "or automatically loaded: - If a configuration is provided with ``config``, ``**kwargs`` will", "configuration have already been done) - If a configuration is not provided, ``kwargs``", "both encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\"", "AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None) if", "state else: encoder_outputs = () # Decode kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get(", "to create a model from a pretrained configuration but load your own weights.", "two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs):", "embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the", "its configuration file in a format such that it can be loaded using", "and its configuration file in a format such that it can be loaded", "encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder input sequence tokens in", "the model is a model provided by the library (loaded with the ``shortcut-name``", "using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir: (`optional`) string: Path", "be used to override said attribute with the supplied ``kwargs`` value. Remaining keys", "or implied. # See the License for the specific language governing permissions and", "Can be used to update the configuration object (after it being loaded) and", ":func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. 
cache_dir: (`optional`) string: Path to a directory", "and a decoder from one or two base classes of the library from", "a state dictionary loaded from saved weights file. This option can be used", "kwargs_decoder[\"encoder_hidden_states\"] = encoder_hidden_states kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder)", "\" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model", "classes are inconsistent to that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings -", "of a state dictionary loaded from saved weights file. This option can be", "respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed to both encoders and", "forward pass through the encoder, and then perform several forward passes with the", "and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether a `config`", "weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url to a", "division, print_function, unicode_literals import logging import os import torch from torch import nn", "of the same family. If the name of or that path to a", "or - the model was saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling", "and decoder are of the same family. If the name of or that", "behavior and initialize, say, the decoder randomly by creating it beforehand as follows", "a configuration attribute will be used to override said attribute with the supplied", "Save a Seq2Seq model and its configuration file in a format such that", "model level is made # by the value of the flag `is_decoder` that", "model checkpoints. The model is set in evaluation mode by default using `model.eval()`", "path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. -", "'foo.bar:4012'}. The proxies are used on each request. output_loading_info: (`optional`) boolean: Set to", "in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument,", "the model as a whole. # We let the specific kwargs override the", "a dictionary of configuration parameters that will be used to initialize a\" \"", "Inc. team. # # Licensed under the Apache License, Version 2.0 (the \"License\");", "if decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True", "the License. \"\"\" Classes to support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import,", "governing permissions and # limitations under the License. \"\"\" Classes to support Encoder-Decoder", ") decoder.config.is_decoder = True model = cls(encoder, decoder) return model def save_pretrained(self, save_directory):", "using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. 
- a path or url to a `tensorflow index", ":]: value for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load", "- a string with the `shortcut name` of a pre-trained model to load", "the ``shortcut-name`` string of a pretrained model), or - the model was saved", "use this file except in compliance with the License. # You may obtain", ":func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a path or url to a `tensorflow index checkpoint", "perform one forward pass through both the encoder and decoder; - During prediction,", "with the ``shortcut-name`` string of a pretrained model), or - the model was", "with the encoder's hidden state through the decoder to decode a full sequence.", "``decoder_output_attention=True``). The remaining kwargs will be passed to both encoders and decoders. Examples::", "kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"decoder_\") }", "encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder = False decoder = kwargs_decoder.pop(\"model\", None) if decoder", "if you want to create a model from a pretrained configuration but load", "3 flavors: encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`) and those", "can be used if you want to create a model from a pretrained", "`./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True and a configuration", "to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds", "should be set to True and a configuration object should be provided as", "cached if the standard cache should not be used. force_download: (`optional`) boolean, default", "pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the", "they exists. proxies: (`optional`) dict, default None: A dictionary of proxy servers to", "provided or automatically loaded: - If a configuration is provided with ``config``, ``**kwargs``", "override the common ones in case of conflict. kwargs_common = { argument: value", "BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs):", "argument.startswith(\"decoder_\") } ) # Encode if needed (training, first prediction pass) encoder_hidden_states =", "decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder = decoder @classmethod def from_pretrained( cls,", "of shape ``(batch_size, sequence_length)`` Indices of decoder input sequence tokens in the vocabulary.", "- BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte -", "by prefixing the key with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining", "url to a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf``", "None: # We will create a randomly initilized LSTM model as decoder if", "an automatically loaded configuation. Configuration can be automatically loaded when: - the model", "a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is", "afterwards. 
decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either: - a string with", "We let the specific kwargs override the common ones in case of conflict.", "configuration file in a format such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained`", "by `decoder_`) and those # that apply to the model as a whole.", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "to a directory in which a downloaded pre-trained model configuration should be cached", "that path to a pretrained model is specified the encoder and the decoder", "def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an encoder and", "full sequence. Therefore, we skip the forward pass on the encoder if an", "for argument, value in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder", "a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``.", "request. output_loading_info: (`optional`) boolean: Set to ``True`` to also return a dictionnary containing", "\"To load an LSTM in Encoder-Decoder model, please supply either: \" \" -", "None: A dictionary of proxy servers to use by protocol or endpoint, e.g.:", "in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only", "a model provided by the library (loaded with the ``shortcut-name`` string of a", "hidden state through the decoder to decode a full sequence. Therefore, we skip", "None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\"", "# distributed under the License is distributed on an \"AS IS\" BASIS, #", "conversion scripts and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate", "as a transformer architecture with one of the base model classes of the", "argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value", "that will be instantiated as a transformer architecture with one of the base", "decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword arguments", "servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The", "kwargs_encoder.pop(\"model\", None) if encoder is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder )", "family. 
If the name of or that path to a pretrained model is", "False: Force to (re-)download the model weights and configuration files and override the", "# We will create a randomly initilized LSTM model as decoder if \"decoder_config\"", "model is a model provided by the library (loaded with the ``shortcut-name`` string", "pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not in pretrained_model_name_or_path", "evaluation mode by default using `model.eval()` (Dropout modules are deactivated) To train the", "all relevant updates to the configuration have already been done) - If a", "and is reloaded by suppling the save directory. - the model is loaded", "\" - a dictionary of configuration parameters that will be used to initialize", "embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings", "initiate the encoder. Either: - a string with the `shortcut name` of a", "__init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder", "with the License. # You may obtain a copy of the License at", "``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in the directory.", "arguments will be passed to the underlying model's ``__init__`` method config: (`optional`) instance", "kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"encoder_\")", "train the model, you need to first set it back in training mode", "not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder = kwargs_common.copy() kwargs_encoder.update(", "wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer for", "state through the decoder to decode a full sequence. Therefore, we skip the", "embedding weights. However the different model classes are inconsistent to that respect: -", "is not a simpler option. cache_dir: (`optional`) string: Path to a directory in", "= True model = cls(encoder, decoder) return model def save_pretrained(self, save_directory): \"\"\" Save", "is reloaded by suppling the save directory. - the model is loaded by", "encoder and decoder by prefixing the key with `encoder_` and `decoder_` respectively. (e.g.", "argument of the XEmbedding layer for each model, but it is \"blocked\" by", "a `tensorflow index checkpoint file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be", "will be intialized randomly if its weights are not present). It is possible", "law or agreed to in writing, software # distributed under the License is", "model as whole. # We let the specific kwargs override the common ones", "and those # that apply to the model as a whole. # We", "shape ``(batch_size, sequence_length)`` Indices of encoder input sequence tokens in the vocabulary. 
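# ----------------------------------------------------------------------------
# Usage sketch (illustrative addition, not part of the original module): how the
# prefixed keyword arguments and the two-step forward pass described above fit
# together. The checkpoint name and tensor shapes are placeholder choices.
def _encoder_decoder_forward_sketch():
    # Build a Bert2Bert model: AutoModel as encoder, AutoModelWithLMHead as decoder.
    model = PreTrainedEncoderDecoder.from_pretrained("bert-base-uncased", "bert-base-uncased")
    model.eval()

    batch_size, src_len, tgt_len = 2, 16, 8
    encoder_input_ids = torch.randint(0, model.encoder.config.vocab_size, (batch_size, src_len))
    decoder_input_ids = torch.randint(0, model.decoder.config.vocab_size, (batch_size, tgt_len))

    # Training-style call: a single pass through both encoder and decoder.
    # A key prefixed with `encoder_` (e.g. `encoder_attention_mask`) is routed to the
    # encoder only, a `decoder_` prefix to the decoder only, and unprefixed keys to both.
    outputs = model(encoder_input_ids, decoder_input_ids)

    # Prediction-style calls: encode once, then reuse the hidden states for several
    # decoding steps. Passing `encoder_hidden_states` makes forward() skip the encoder.
    encoder_hidden_states = model.encoder(encoder_input_ids)[0]
    step_outputs = model(
        encoder_input_ids,
        decoder_input_ids,
        encoder_hidden_states=encoder_hidden_states,
    )
    return outputs, step_outputs
# ----------------------------------------------------------------------------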
class Model2Model(PreTrainedEncoderDecoder):
    r"""
    :class:`~transformers.Model2Model` instantiates a Seq2Seq model where both the encoder and
    the decoder are of the same family. If the name of or the path to a pretrained model is
    specified, the encoder and the decoder will be initialized with the pretrained weights
    (the cross-attention will be initialized randomly if its weights are not present).

    It is possible to override this behavior and initialize, say, the decoder randomly by
    creating it beforehand as follows::

        config = BertConfig.from_pretrained('bert-base-uncased')
        decoder = BertForMaskedLM(config)
        model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder)
    """

    def __init__(self, *args, **kwargs):
        super(Model2Model, self).__init__(*args, **kwargs)
        self.tie_weights()

    def tie_weights(self):
        """Tie the encoder's and decoder's embeddings together.

        We need, for each model, to get down to the embedding weights. However, the
        different model classes are inconsistent in that respect:

        - BertModel: embeddings.word_embeddings
        - RoBERTa: embeddings.word_embeddings
        - XLMModel: embeddings
        - GPT2: wte
        - BertForMaskedLM: bert.embeddings.word_embeddings
        - RobertaForMaskedLM: roberta.embeddings.word_embeddings

        We could reach the word-embedding layer of each model, but access to it is "blocked"
        by a model-specific keyword (bert, ...), so tying is currently a no-op.
        """
        # self._tie_or_clone_weights(self.encoder, self.decoder)
        pass

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs):
        if (
            "bert" not in pretrained_model_name_or_path
            or "roberta" in pretrained_model_name_or_path
            or "distilbert" in pretrained_model_name_or_path
        ):
            raise ValueError("Only the Bert model is currently supported.")

        model = super(Model2Model, cls).from_pretrained(
            encoder_pretrained_model_name_or_path=pretrained_model_name_or_path,
            decoder_pretrained_model_name_or_path=pretrained_model_name_or_path,
            *args,
            **kwargs
        )
        return model
This option can be used if you want to create a", "encoder if an argument named `encoder_hidden_state` is passed to this function. Params: encoder_input_ids:", "this file except in compliance with the License. # You may obtain a", "`decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed to both encoders", "is found in the directory. state_dict: (`optional`) dict: an optional state dictionnary for", "} ) # Encode if needed (training, first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\",", "be directly passed to the underlying model's ``__init__`` method (we assume all relevant", "- If a configuration is provided with ``config``, ``**kwargs`` will be directly passed", "dict: an optional state dictionnary for the model to use instead of a", "state dictionnary for the model to use instead of a state dictionary loaded", "it is \"blocked\" by a model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder)", "versions if they exists. proxies: (`optional`) dict, default None: A dictionary of proxy", "slower than converting the TensorFlow checkpoint in a PyTorch model using the provided", "the directory. state_dict: (`optional`) dict: an optional state dictionnary for the model to", "the decoder randomly by creating it beforehand as follows config = BertConfig.from_pretrained() decoder", "remaning positional arguments will be passed to the underlying model's ``__init__`` method config:", "Path to a directory in which a downloaded pre-trained model configuration should be", "dictionary of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128',", "# Load and initialize the encoder and decoder # The distinction between encoder", "derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of an automatically", "save directory. - the model is loaded by suppling a local directory as", "from .modeling_auto import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is", "{ argument[len(\"encoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"encoder_\") } )", "import torch from torch import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger =", "= self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model` instantiates a", "e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on each request. output_loading_info:", "respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte", "``from_tf`` should be set to True and a configuration object should be provided", "missing keys, unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary of keyword", "configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that corresponds to a", "of the XEmbedding layer for each model, but it is \"blocked\" by a", "configuration should be cached if the standard cache should not be used. 
force_download:", "distinction between encoder and decoder at the model level is made # by", "self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder and decoders' embeddings together.", "``./my_model_directory/decoder``. - a path or url to a `tensorflow index checkpoint file` (e.g.", "decoder_input_ids, **kwargs): \"\"\" The forward pass on a seq2eq depends what we are", "in pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model is", "in kwargs.items() if not argument.startswith(\"encoder_\") and not argument.startswith(\"decoder_\") } kwargs_decoder = kwargs_common.copy() kwargs_encoder", "the `shortcut name` of a pre-trained model to load from cache or download,", "and override the cached versions if they exists. proxies: (`optional`) dict, default None:", "under the License. \"\"\" Classes to support Encoder-Decoder architectures \"\"\" from __future__ import", "License. \"\"\" Classes to support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division,", "a PyTorch model using the provided conversion scripts and loading the PyTorch model", "passed to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key of ``kwargs`` that", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to", "= decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates", "a whole. # We let the specific kwargs override the common ones in", "a configuration is not provided, ``kwargs`` will be first passed to the configuration", "used to update the configuration object (after it being loaded) and initiate the", "the PyTorch model afterwards. decoder_pretrained_model_name_or_path: information necessary to initiate the decoder. Either: -", "will be first passed to the configuration class initialization function (:func:`~transformers.PretrainedConfig.from_pretrained`). Each key", "self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward pass on a", "JSON file named `config.json` is found in the directory. state_dict: (`optional`) dict: an", "pretrained_model_name_or_path or \"distilbert\" in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model is currently", ":func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url to a `tensorflow index checkpoint", "beforehand as follows config = BertConfig.from_pretrained() decoder = BertForMaskedLM(config) model = Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder)", "`decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM,", "for argument, value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and initialize", "saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. 
- a path or url to a `tensorflow", "automatically loaded: - If a configuration is provided with ``config``, ``**kwargs`` will be", "in pretrained_model_name_or_path ): raise ValueError(\"Only the Bert model is currently supported.\") model =", "and a configuration JSON file named `config.json` is found in the directory. state_dict:", "required by applicable law or agreed to in writing, software # distributed under", "logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will be", "save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq model and its configuration file in a", "set it back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to", "model_args: (`optional`) Sequence of positional arguments: All remaning positional arguments will be passed", "- a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a dictionary", "# by the value of the flag `is_decoder` that we need to set", "separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\"", "True and a configuration object should be provided as ``config`` argument. This loading", "decoder if \"decoder_config\" not in kwargs: raise ValueError( \"To load an LSTM in", "the encoder and the decoder will be initialized with the pretrained weight (the", "import AutoModel, AutoModelWithLMHead logger = logging.getLogger(__name__) class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic", "model classes of the library as encoder and (optionally) another one as decoder", "as encoder and (optionally) another one as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)`", "the model weights and configuration files and override the cached versions if they", "e.g.: ``./my_model_directory/encoder``. - a path or url to a `tensorflow index checkpoint file`", "You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "and decoder at the model level is made # by the value of", "conversion scripts and loading the PyTorch model afterwards. model_args: (`optional`) Sequence of positional", "\" - a torch.nn.LSTM model as `decoder_model` parameter (`decoder_model=lstm_model`), or\" \" - a", "should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir:", "exists. proxies: (`optional`) dict, default None: A dictionary of proxy servers to use", "kwargs will be passed to both encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased',", "is not provided, ``kwargs`` will be first passed to the configuration class initialization", "\"\"\" from __future__ import absolute_import, division, print_function, unicode_literals import logging import os import", "kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for argument, value in kwargs.items() if argument.startswith(\"encoder_\") }", "BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding layer for each model,", "function. 
Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder input sequence", "Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division, print_function, unicode_literals import logging import", "= kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states =", "method (we assume all relevant updates to the configuration have already been done)", "proxies are used on each request. output_loading_info: (`optional`) boolean: Set to ``True`` to", "'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM, cls).from_pretrained(*args, **kwargs) return model", "encoder. Either: - a string with the `shortcut name` of a pre-trained model", "kwargs_encoder = kwargs_common.copy() kwargs_encoder.update( { argument[len(\"encoder_\") :]: value for argument, value in kwargs.items()", "is passed to this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices", "endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname': 'foo.bar:4012'}. The proxies are used on each request.", "this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder input", "on each request. output_loading_info: (`optional`) boolean: Set to ``True`` to also return a", "force_download: (`optional`) boolean, default False: Force to (re-)download the model weights and configuration", "in the directory. state_dict: (`optional`) dict: an optional state dictionnary for the model", "pre-trained model checkpoints. The model is set in evaluation mode by default using", "from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs): if ( \"bert\" not in pretrained_model_name_or_path or \"roberta\" in", "in evaluation mode by default using `model.eval()` (Dropout modules are deactivated) To train", "loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether a", "# you may not use this file except in compliance with the License.", "library from pre-trained model checkpoints. The model is set in evaluation mode by", "those # that apply to the model as a whole. # We let", "that apply to the model as a whole. # We let the specific", "Model2Model.from_pretrained('bert-base-uncased', decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self):", "converting the TensorFlow checkpoint in a PyTorch model using the provided conversion scripts", "using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory. - the model", "decoder) return model def save_pretrained(self, save_directory): \"\"\" Save a Seq2Seq model and its", "encoder and the decoder will be initialized with the pretrained weight (the cross-attention", ") decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder): r\"\"\" :class:`~transformers.Model2Model`", "and :func:`~transformers.PreTrainedModel.from_pretrained` is not a simpler option. cache_dir: (`optional`) string: Path to a", "from cache or download, e.g.: ``bert-base-uncased``. 
- a path to a `directory` containing", "output_loading_info: (`optional`) boolean: Set to ``True`` to also return a dictionnary containing missing", "Remaining dictionary of keyword arguments. \"\"\" # keyword arguments come in 3 flavors:", "value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for", "model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path or url to", "value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Encode if needed (training, first", "for each to get down to the embedding weights. However the different model", "(we assume all relevant updates to the configuration have already been done) -", "to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/encoder``. - a", "to the underlying model's ``__init__`` method config: (`optional`) instance of a class derived", "decoder.config.is_decoder = True model = cls(encoder, decoder) return model def save_pretrained(self, save_directory): \"\"\"", "should be provided as ``config`` argument. This loading path is slower than converting", "back in training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the", "whether a `config` is provided or automatically loaded: - If a configuration is", "model is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs )", "for the encoder and decoder by prefixing the key with `encoder_` and `decoder_`", "\"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids, decoder_input_ids, **kwargs): \"\"\" The forward", "License for the specific language governing permissions and # limitations under the License.", "the encoder's hidden state through the decoder to decode a full sequence. Therefore,", "file` (e.g. `./tf_model/model.ckpt.index`). In this case, ``from_tf`` should be set to True and", "(e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed to both encoders and decoders.", "# We let the specific kwargs override the common ones in case of", "can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the encoder' and decoder's parameters in", "\"License\"); # you may not use this file except in compliance with the", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "to that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings -", "768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] = torch.nn.LSTM(kwargs.pop(\"decoder_config\")) model = super(Model2LSTM, cls).from_pretrained(*args, **kwargs) return", "to the model as whole. # We let the specific kwargs override the", "to initialize a\" \" torch.nn.LSTM model as `decoder_config` keyword argument. \" \" E.g.", "decoder randomly by creating it beforehand as follows config = BertConfig.from_pretrained() decoder =", "team. # # Licensed under the Apache License, Version 2.0 (the \"License\"); #", "Indices of decoder input sequence tokens in the vocabulary. 
kwargs: (`optional`) Remaining dictionary", "If a configuration is provided with ``config``, ``**kwargs`` will be directly passed to", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing,", "saved weights file. This option can be used if you want to create", "in writing, software # distributed under the License is distributed on an \"AS", "used if you want to create a model from a pretrained configuration but", "\"\"\" def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying", "you need to first set it back in training mode with `model.train()` Params:", "base classes of the library from pre-trained model checkpoints. The model is set", "import absolute_import, division, print_function, unicode_literals import logging import os import torch from torch", "argument. \" \" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"] =", "decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an encoder and a decoder from one", "`encoder_`), decoder-specific (prefixed by `decoder_`) and those # that apply to the model", "the Bert model is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args,", "kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and initialize the encoder and decoder", "- XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument", "unicode_literals import logging import os import torch from torch import nn from .modeling_auto", "\"blocked\" by a model-specific keyword (bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod", "be provided as ``config`` argument. This loading path is slower than converting the", "to the model as a whole. # We let the specific kwargs override", "we perform one forward pass through both the encoder and decoder; - During", "value for argument, value in kwargs.items() if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\")", "decode a full sequence. Therefore, we skip the forward pass on the encoder", "decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\" Instantiates an", "file in a format such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We", "None) if encoder_hidden_states is None: encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0", "configuration object (after it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave", "decoder input sequence tokens in the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword", "(`optional`) instance of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to", "generic model class that will be instantiated as a transformer architecture with one", "loading the PyTorch model afterwards. 
model_args: (`optional`) Sequence of positional arguments: All remaning", "**kwargs): if ( \"bert\" not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\"", "`encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). The remaining kwargs will be passed to", "limitations under the License. \"\"\" Classes to support Encoder-Decoder architectures \"\"\" from __future__", "is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model =", "as a whole. # We let the specific kwargs override the common ones", "the model as whole. # We let the specific kwargs override the common", "underlying model's ``__init__`` function. You can specify kwargs sepcific for the encoder and", "- GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings - RobertaForMaskedLM: roberta.embeddings.word_embeddings argument of the XEmbedding", "\"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs class Model2Model(PreTrainedEncoderDecoder):", "2.0 (the \"License\"); # you may not use this file except in compliance", "depends what we are performing: - During training we perform one forward pass", "initiate the decoder. Either: - a string with the `shortcut name` of a", "Seq2Seq model and its configuration file in a format such that it can", "The model is set in evaluation mode by default using `model.eval()` (Dropout modules", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the", "a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead of", "encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" #", "mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either: -", "information necessary to initiate the decoder. Either: - a string with the `shortcut", "kwargs sepcific for the encoder and decoder by prefixing the key with `encoder_`", "# # Unless required by applicable law or agreed to in writing, software", "(bert, )... \"\"\" # self._tie_or_clone_weights(self.encoder, self.decoder) pass @classmethod def from_pretrained(cls, pretrained_model_name_or_path, *args, **kwargs):", "express or implied. # See the License for the specific language governing permissions", "value. Remaining keys that do not correspond to any configuration attribute will be", "``bert-base-uncased``. - a path to a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`,", "through the encoder, and then perform several forward passes with the encoder's hidden", "ValueError(\"Only the Bert model is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path,", "corresponds to a configuration attribute will be used to override said attribute with", "a format such that it can be loaded using `:func:`~transformers.PreTrainedEncoderDecoder.from_pretrained` We save the", "either express or implied. 
# See the License for the specific language governing", "Configuration can be automatically loaded when: - the model is a model provided", "r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will be instantiated as a", "use instead of an automatically loaded configuation. Configuration can be automatically loaded when:", "# output the last layer hidden state else: encoder_outputs = () # Decode", "``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of encoder input sequence tokens in the", "supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs ) return model class", "transformer architecture with one of the base model classes of the library as", "*args, **kwargs ) return model class Model2LSTM(PreTrainedEncoderDecoder): @classmethod def from_pretrained(cls, *args, **kwargs): if", "with ``config``, ``**kwargs`` will be directly passed to the underlying model's ``__init__`` method", "weights. However the different model classes are inconsistent to that respect: - BertModel:", "a `directory` containing model weights saved using :func:`~transformers.PreTrainedModel.save_pretrained`, e.g.: ``./my_model_directory/decoder``. - a path", "name of or that path to a pretrained model is specified the encoder", "model is set in evaluation mode by default using `model.eval()` (Dropout modules are", "the License. # You may obtain a copy of the License at #", "unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary of keyword arguments. Can", "import logging import os import torch from torch import nn from .modeling_auto import", "in Encoder-Decoder model, please supply either: \" \" - a torch.nn.LSTM model as", "scripts and loading the PyTorch model afterwards. model_args: (`optional`) Sequence of positional arguments:", "a dictionnary containing missing keys, unexpected keys and error messages. kwargs: (`optional`) Remaining", "Bert model is currently supported.\") model = super(Model2Model, cls).from_pretrained( encoder_pretrained_model_name_or_path=pretrained_model_name_or_path, decoder_pretrained_model_name_or_path=pretrained_model_name_or_path, *args, **kwargs", "encoder_outputs = self.encoder(encoder_input_ids, **kwargs_encoder) encoder_hidden_states = encoder_outputs[ 0 ] # output the last", "and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert \"\"\" # keyword", "be passed to the underlying model's ``__init__`` method config: (`optional`) instance of a", "this case though, you should check if using :func:`~transformers.PreTrainedModel.save_pretrained` and :func:`~transformers.PreTrainedModel.from_pretrained` is not", "that apply to the model as whole. # We let the specific kwargs", "an encoder and a decoder from one or two base classes of the", "a `config` is provided or automatically loaded: - If a configuration is provided", "is specified the encoder and the decoder will be initialized with the pretrained", "in kwargs: raise ValueError( \"To load an LSTM in Encoder-Decoder model, please supply", "are inconsistent to that respect: - BertModel: embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel:", "``config`` argument. 
This loading path is slower than converting the TensorFlow checkpoint in", "(loaded with the ``shortcut-name`` string of a pretrained model), or - the model", "depending on whether a `config` is provided or automatically loaded: - If a", "= kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs + encoder_outputs", "\"\"\" Classes to support Encoder-Decoder architectures \"\"\" from __future__ import absolute_import, division, print_function,", "object (after it being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently", "self.decoder = decoder @classmethod def from_pretrained( cls, encoder_pretrained_model_name_or_path=None, decoder_pretrained_model_name_or_path=None, *model_args, **kwargs ): r\"\"\"", "passed to this function. Params: encoder_input_ids: ``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of", "by suppling a local directory as ``pretrained_model_name_or_path`` and a configuration JSON file named", "another one as decoder when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def", "kwargs_decoder[\"encoder_attention_mask\"] = kwargs_encoder.get( \"attention_mask\", None ) decoder_outputs = self.decoder(decoder_input_ids, **kwargs_decoder) return decoder_outputs +", "# # Licensed under the Apache License, Version 2.0 (the \"License\"); # you", "saved using :func:`~transformers.PreTrainedModel.save_pretrained` and is reloaded by suppling the save directory. - the", "The proxies are used on each request. output_loading_info: (`optional`) boolean: Set to ``True``", "decoder # The distinction between encoder and decoder at the model level is", "pretrained model is specified the encoder and the decoder will be initialized with", "being loaded) and initiate the model. (e.g. ``output_attention=True``). Behave differently depending on whether", "class PreTrainedEncoderDecoder(nn.Module): r\"\"\" :class:`~transformers.PreTrainedEncoderDecoder` is a generic model class that will be instantiated", "positional arguments: All remaning positional arguments will be passed to the underlying model's", "will be initialized with the pretrained weight (the cross-attention will be intialized randomly", "of a class derived from :class:`~transformers.PretrainedConfig`: Configuration for the model to use instead", "encoder and decoders' embeddings together. We need for each to get down to", "model using the provided conversion scripts and loading the PyTorch model afterwards. decoder_pretrained_model_name_or_path:", "self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder and decoders' embeddings together. We need", "layer for each model, but it is \"blocked\" by a model-specific keyword (bert,", "``torch.LongTensor`` of shape ``(batch_size, sequence_length)`` Indices of decoder input sequence tokens in the", "decoder = kwargs_decoder.pop(\"model\", None) if decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path, **kwargs_decoder", "None) if encoder is None: encoder = AutoModel.from_pretrained( encoder_pretrained_model_name_or_path, *model_args, **kwargs_encoder ) encoder.config.is_decoder", "keyword argument. \" \" E.g. 
`decoder_config={'input_size': 768, 'hidden_size': 768, 'num_layers': 2}`\" ) kwargs[\"decoder_model\"]", "model provided by the library (loaded with the ``shortcut-name`` string of a pretrained", "argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value in kwargs.items()", "sequence tokens in the vocabulary. kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\"", "attribute will be passed to the underlying model's ``__init__`` function. You can specify", "value in kwargs.items() if argument.startswith(\"decoder_\") } ) # Load and initialize the encoder", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\")) self.decoder.save_pretrained(os.path.join(save_directory, \"decoder\")) def forward(self, encoder_input_ids,", "the library (loaded with the ``shortcut-name`` string of a pretrained model), or -", "using the provided conversion scripts and loading the PyTorch model afterwards. model_args: (`optional`)", "dictionnary containing missing keys, unexpected keys and error messages. kwargs: (`optional`) Remaining dictionary", "path to a pretrained model is specified the encoder and the decoder will", "used. force_download: (`optional`) boolean, default False: Force to (re-)download the model weights and", "`model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either: - a string", "kwargs: raise ValueError( \"To load an LSTM in Encoder-Decoder model, please supply either:", "**kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\" Tying the encoder and decoders'", "named `config.json` is found in the directory. state_dict: (`optional`) dict: an optional state", "case of conflict. kwargs_common = { argument: value for argument, value in kwargs.items()", "if argument.startswith(\"encoder_\") } ) kwargs_decoder.update( { argument[len(\"decoder_\") :]: value for argument, value in", "passes with the encoder's hidden state through the decoder to decode a full", "provided conversion scripts and loading the PyTorch model afterwards. model_args: (`optional`) Sequence of", "this behavior and initialize, say, the decoder randomly by creating it beforehand as", "we need to set correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder is None:", "library (loaded with the ``shortcut-name`` string of a pretrained model), or - the", "kwargs: (`optional`) Remaining dictionary of keyword arguments. \"\"\" # keyword arguments come in", "configuration files and override the cached versions if they exists. proxies: (`optional`) dict,", "which a downloaded pre-trained model configuration should be cached if the standard cache", "False decoder = kwargs_decoder.pop(\"model\", None) if decoder is None: decoder = AutoModelWithLMHead.from_pretrained( decoder_pretrained_model_name_or_path,", "embeddings.word_embeddings - RoBERTa: embeddings.word_embeddings - XLMModel: embeddings - GPT2: wte - BertForMaskedLM: bert.embeddings.word_embeddings", "os import torch from torch import nn from .modeling_auto import AutoModel, AutoModelWithLMHead logger", "except in compliance with the License. 
# You may obtain a copy of", "All remaning positional arguments will be passed to the underlying model's ``__init__`` method", "save the encoder' and decoder's parameters in two separate directories. \"\"\" self.encoder.save_pretrained(os.path.join(save_directory, \"encoder\"))", "In this case, ``from_tf`` should be set to True and a configuration object", "the decoder to decode a full sequence. Therefore, we skip the forward pass", "to both encoders and decoders. Examples:: model = PreTrainedEncoderDecoder.from_pretained('bert-base-uncased', 'bert-base-uncased') # initialize Bert2Bert", "for the model to use instead of an automatically loaded configuation. Configuration can", "If a configuration is not provided, ``kwargs`` will be first passed to the", "when created with the `AutoModel.from_pretrained(pretrained_model_name_or_path)` class method. \"\"\" def __init__(self, encoder, decoder): super(PreTrainedEncoderDecoder,", "may not use this file except in compliance with the License. # You", "License is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "passed to the underlying model's ``__init__`` method (we assume all relevant updates to", "object should be provided as ``config`` argument. This loading path is slower than", "parameters that will be used to initialize a\" \" torch.nn.LSTM model as `decoder_config`", "Therefore, we skip the forward pass on the encoder if an argument named", "create a randomly initilized LSTM model as decoder if \"decoder_config\" not in kwargs:", "keys that do not correspond to any configuration attribute will be passed to", "the underlying model's ``__init__`` function. You can specify kwargs sepcific for the encoder", "of proxy servers to use by protocol or endpoint, e.g.: {'http': 'foo.bar:3128', 'http://hostname':", "decoder_pretrained_model_name_or_path, **kwargs_decoder ) decoder.config.is_decoder = True model = cls(encoder, decoder) return model def", "torch.nn.LSTM model as `decoder_config` keyword argument. \" \" E.g. `decoder_config={'input_size': 768, 'hidden_size': 768,", "coding=utf-8 # Copyright 2018 The HuggingFace Inc. team. # # Licensed under the", "to (re-)download the model weights and configuration files and override the cached versions", "decoder_model=decoder) \"\"\" def __init__(self, *args, **kwargs): super(Model2Model, self).__init__(*args, **kwargs) self.tie_weights() def tie_weights(self): \"\"\"", "training mode with `model.train()` Params: encoder_pretrained_model_name_or_path: information necessary to initiate the encoder. Either:", "its weights are not present). It is possible to override this behavior and", "if they exists. proxies: (`optional`) dict, default None: A dictionary of proxy servers", "first prediction pass) encoder_hidden_states = kwargs_encoder.pop(\"hidden_states\", None) if encoder_hidden_states is None: encoder_outputs =", "return a dictionnary containing missing keys, unexpected keys and error messages. kwargs: (`optional`)", "the key with `encoder_` and `decoder_` respectively. (e.g. ``decoder_output_attention=True``). 
The remaining kwargs will", "if ( \"bert\" not in pretrained_model_name_or_path or \"roberta\" in pretrained_model_name_or_path or \"distilbert\" in", "- a dictionary of configuration parameters that will be used to initialize a\"", "encoder, decoder): super(PreTrainedEncoderDecoder, self).__init__() self.encoder = encoder self.decoder = decoder @classmethod def from_pretrained(", "cache_dir: (`optional`) string: Path to a directory in which a downloaded pre-trained model", "we perform one forward pass through the encoder, and then perform several forward", "the library from pre-trained model checkpoints. The model is set in evaluation mode", "set correctly. encoder = kwargs_encoder.pop(\"model\", None) if encoder is None: encoder = AutoModel.from_pretrained(", "encoder-specific (prefixed by # `encoder_`), decoder-specific (prefixed by `decoder_`) and those # that", "directory as ``pretrained_model_name_or_path`` and a configuration JSON file named `config.json` is found in" ]
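The overlapping fragments above all come from the docstrings and implementation of an early transformers encoder-decoder module (PreTrainedEncoderDecoder, Model2Model, Model2LSTM). For readability, here is a minimal usage sketch assembled only from what those fragments state: Model2Model.from_pretrained('bert-base-uncased') builds a "Bert2Bert" pair (the decoder's cross-attention is randomly initialised), the forward pass takes encoder_input_ids and decoder_input_ids with encoder_/decoder_-prefixed kwargs routed to the matching sub-model, and save_pretrained writes encoder/ and decoder/ sub-directories. The top-level import path and the toy token ids are assumptions for illustration; this legacy 2.x API has since been superseded by EncoderDecoderModel in current transformers releases.

# A minimal sketch, assuming the legacy transformers 2.x API quoted above;
# the top-level import path is an assumption (the fragments only show the
# module's own relative imports).
import torch
from transformers import Model2Model   # assumed export of the quoted class

# Same Bert checkpoint for encoder and decoder ("Bert2Bert" in the docstring);
# the decoder's cross-attention weights start out randomly initialised.
model = Model2Model.from_pretrained('bert-base-uncased')

# Toy token-id tensors purely for illustration.
encoder_input_ids = torch.tensor([[101, 7592, 2088, 102]])
decoder_input_ids = torch.tensor([[101, 2023, 102]])

# One call runs the encoder and then the decoder; kwargs prefixed with
# encoder_/decoder_ are routed to the matching sub-model, unprefixed
# kwargs are passed to both.
outputs = model(encoder_input_ids, decoder_input_ids)

# Saves two sub-directories, ./bert2bert/encoder and ./bert2bert/decoder,
# reloadable with PreTrainedEncoderDecoder.from_pretrained.
model.save_pretrained('./bert2bert')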
[ "= default_args, schedule_interval='*/20 * * * *' ) get_data = BashOperator( task_id='get-data', bash_command='curl", "PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data >> escolhe_h_m >> male_female >> [branch_homem, branch_mulher]", "18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1)", "'male': return 'branch_homem' else: return 'branch_mulher' male_female = BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag", "/usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m,", "timedelta import pandas as pd import random # Default args definition default_args =", "dag = DAG( 'treino-03', description=\"Extrai dados do Titanic e calcula idade media para", "homens ou mulheres\", default_args = default_args, schedule_interval='*/20 * * * *' ) get_data", "as pd import random # Default args definition default_args = { 'owner': 'Rafael',", "1, 'Retry_delay': timedelta(minutes=1) } # Dag definition dag = DAG( 'treino-03', description=\"Extrai dados", "def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def", "'start_date': datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False,", "{med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag ) def mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv')", "datetime, timedelta import pandas as pd import random # Default args definition default_args", "definition dag = DAG( 'treino-03', description=\"Extrai dados do Titanic e calcula idade media", "= pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade das mulheres no", "['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) } # Dag", "Default args definition default_args = { 'owner': 'Rafael', 'depends_on_past': False, 'start_date': datetime(2020, 11,", "{ 'owner': 'Rafael', 'depends_on_past': False, 'start_date': datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>',", "'owner': 'Rafael', 'depends_on_past': False, 'start_date': datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'],", "pandas as pd import random # Default args definition default_args = { 'owner':", "from airflow.operators.bash_operator import BashOperator from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import datetime,", "BranchPythonOperator from datetime import datetime, timedelta import pandas as pd import random #", "schedule_interval='*/20 * * * *' ) get_data = BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o", "python_callable=MouF, provide_context=True, dag=dag ) def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex ==", "get_data = BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return", "default_args = default_args, schedule_interval='*/20 * * * 
*' ) get_data = BashOperator( task_id='get-data',", "from airflow.operators.python_operator import PythonOperator, BranchPythonOperator from datetime import datetime, timedelta import pandas as", "import PythonOperator, BranchPythonOperator from datetime import datetime, timedelta import pandas as pd import", "task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male': return 'branch_homem'", "== 'female'].Age.mean() print(f'Media de idade das mulheres no Titanic: {med}') branch_mulher = PythonOperator(", "'<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) } # Dag definition", "import random # Default args definition default_args = { 'owner': 'Rafael', 'depends_on_past': False,", "import pandas as pd import random # Default args definition default_args = {", "mulheres\", default_args = default_args, schedule_interval='*/20 * * * *' ) get_data = BashOperator(", "= df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade das mulheres no Titanic: {med}') branch_mulher", "False, 'start_date': datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry':", "df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade das mulheres", "PythonOperator, BranchPythonOperator from datetime import datetime, timedelta import pandas as pd import random", "no Titanic: {med}') branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data >> escolhe_h_m", "de idade dos homens no Titanic: {med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag", "value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male': return 'branch_homem' else: return 'branch_mulher' male_female = BranchPythonOperator(", "-o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m',", "= PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag ) get_data >> escolhe_h_m >> male_female >> [branch_homem,", "'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value", "Titanic: {med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem, dag=dag ) def mean_mulher(): df =", "from datetime import datetime, timedelta import pandas as pd import random # Default", "False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) } # Dag definition dag = DAG( 'treino-03',", "df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'male'].Age.mean() print(f'Media de idade dos homens", "mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media de idade das", "= BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med", "dados do Titanic e calcula idade media para homens ou mulheres\", default_args =", ") def mean_mulher(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'female'].Age.mean() print(f'Media de", "df.loc[df.Sex == 'female'].Age.mean() 
print(f'Media de idade das mulheres no Titanic: {med}') branch_mulher =", "print(f'Media de idade das mulheres no Titanic: {med}') branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher,", "BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med =", "de idade das mulheres no Titanic: {med}') branch_mulher = PythonOperator( task_id='branch_mulher', python_callable=mean_mulher, dag=dag", "'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) } # Dag definition dag = DAG(", "random # Default args definition default_args = { 'owner': 'Rafael', 'depends_on_past': False, 'start_date':", "'treino-03', description=\"Extrai dados do Titanic e calcula idade media para homens ou mulheres\",", "do Titanic e calcula idade media para homens ou mulheres\", default_args = default_args,", "if value == 'male': return 'branch_homem' else: return 'branch_mulher' male_female = BranchPythonOperator( task_id='condicional',", "pd.read_csv('/usr/local/airflow/data/train.csv') med = df.loc[df.Sex == 'male'].Age.mean() print(f'Media de idade dos homens no Titanic:", "default_args, schedule_interval='*/20 * * * *' ) get_data = BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv", "Titanic e calcula idade media para homens ou mulheres\", default_args = default_args, schedule_interval='*/20", "task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return random.choice(['male', 'female']) escolhe_h_m", "df.loc[df.Sex == 'male'].Age.mean() print(f'Media de idade dos homens no Titanic: {med}') branch_homem =", "datetime(2020, 11, 29, 18, 20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries':", "'branch_homem' else: return 'branch_mulher' male_female = BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def", "BashOperator( task_id='get-data', bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv', dag=dag ) def sorteia_h_m(): return random.choice(['male', 'female'])", "pd import random # Default args definition default_args = { 'owner': 'Rafael', 'depends_on_past':", "= DAG( 'treino-03', description=\"Extrai dados do Titanic e calcula idade media para homens", "'branch_mulher' male_female = BranchPythonOperator( task_id='condicional', python_callable=MouF, provide_context=True, dag=dag ) def mean_homem(): df =", "def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if value == 'male': return 'branch_homem' else: return 'branch_mulher' male_female", "print(f'Media de idade dos homens no Titanic: {med}') branch_homem = PythonOperator( task_id='branch_homem', python_callable=mean_homem,", "20), 'email': ['<EMAIL>', '<EMAIL>'], 'email_on_failure': False, 'email_on_retry': False, 'retries': 1, 'Retry_delay': timedelta(minutes=1) }", "random.choice(['male', 'female']) escolhe_h_m = PythonOperator( task_id='escolhe-h-m', python_callable=sorteia_h_m, dag=dag ) def MouF(**context): value=context['task_instance'].xcom_pull(task_ids='escolhe-h-m') if", "def mean_homem(): df = pd.read_csv('/usr/local/airflow/data/train.csv') med = 
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator
from datetime import datetime, timedelta
import pandas as pd
import random

# Default args definition
default_args = {
    'owner': 'Rafael',
    'depends_on_past': False,
    'start_date': datetime(2020, 11, 29, 18, 20),
    'email': ['<EMAIL>', '<EMAIL>'],
    'email_on_failure': False,
    'email_on_retry': False,
    'retries': 1,
    'retry_delay': timedelta(minutes=1)
}

# Dag definition
dag = DAG(
    'treino-03',
    description="Extracts the Titanic data and computes the mean age for men or women",
    default_args=default_args,
    schedule_interval='*/20 * * * *'
)

# Download the Titanic CSV into the Airflow data directory
get_data = BashOperator(
    task_id='get-data',
    bash_command='curl https://raw.githubusercontent.com/A3Data/hermione/master/hermione/file_text/train.csv -o /usr/local/airflow/data/train.csv',
    dag=dag
)


def sorteia_h_m():
    # Randomly pick which group to analyse; the return value is pushed to XCom
    return random.choice(['male', 'female'])


escolhe_h_m = PythonOperator(
    task_id='escolhe-h-m',
    python_callable=sorteia_h_m,
    dag=dag
)


def MouF(**context):
    # Pull the random choice from XCom and return the task_id of the branch to follow
    value = context['task_instance'].xcom_pull(task_ids='escolhe-h-m')
    if value == 'male':
        return 'branch_homem'
    else:
        return 'branch_mulher'


male_female = BranchPythonOperator(
    task_id='condicional',
    python_callable=MouF,
    provide_context=True,
    dag=dag
)


def mean_homem():
    df = pd.read_csv('/usr/local/airflow/data/train.csv')
    med = df.loc[df.Sex == 'male'].Age.mean()
    print(f'Mean age of the men on the Titanic: {med}')


branch_homem = PythonOperator(
    task_id='branch_homem',
    python_callable=mean_homem,
    dag=dag
)


def mean_mulher():
    df = pd.read_csv('/usr/local/airflow/data/train.csv')
    med = df.loc[df.Sex == 'female'].Age.mean()
    print(f'Mean age of the women on the Titanic: {med}')


branch_mulher = PythonOperator(
    task_id='branch_mulher',
    python_callable=mean_mulher,
    dag=dag
)

get_data >> escolhe_h_m >> male_female >> [branch_homem, branch_mulher]
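# ---------------------------------------------------------------------------
# Hypothetical local check (a sketch, not part of the original DAG): stubbing
# the Airflow context shows which task_id the BranchPythonOperator would follow
# for a given value pulled from the 'escolhe-h-m' task. The _FakeTaskInstance
# class below is made up for illustration; the __main__ guard keeps it from
# running when the scheduler parses the file.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
    class _FakeTaskInstance:
        """Minimal stand-in for the real TaskInstance consumed by MouF."""
        def __init__(self, value):
            self._value = value

        def xcom_pull(self, task_ids):
            return self._value

    # 'male' should route to 'branch_homem', 'female' to 'branch_mulher'
    assert MouF(task_instance=_FakeTaskInstance('male')) == 'branch_homem'
    assert MouF(task_instance=_FakeTaskInstance('female')) == 'branch_mulher'
    print('Branch logic OK')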
[ "description = '%s %s' % (\"--\", \"Outputs in Tcl Shell the list with", "args: argument = args['selection'] return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else:", "# Dictionary of types from Tcl command, needs to be ordered arg_names =", "Tcl command, needs to be ordered , this is for options like -optionname", "'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\" :param", "Tcl Shell the list with the names of system variables.\") # Dictionary of", "= '%s %s' % (\"--\", \"Outputs in Tcl Shell the list with the", "'%s %s' % (\"--\", \"Outputs in Tcl Shell the list with the names", "-optionname value option_types = collections.OrderedDict([ ]) # array of mandatory options for current", "or excellon or geometry or cncjob or global.\\n\" \"Note: Use 'get_sys system variable'", "variable' to get the value and 'set_sys system variable value' to set it.\\n\",", "the list of system variables example: list_sys \"\"\" # List of all command", "current command, args needs to be ordered help = { 'main': \"Returns the", ":param unnamed_args: :return: \"\"\" if 'selection' in args: argument = args['selection'] return str([k", "\"Outputs in Tcl Shell the list with the names of system variables.\") #", "with that string.\\n\" \"Main categories start with: gerber or excellon or geometry or", "MIT Licence # # ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl", "with the names of system variables.\") # Dictionary of types from Tcl command,", "to get the list of system variables example: list_sys \"\"\" # List of", "(add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description = '%s %s' % (\"--\", \"Outputs", "letters from the name \" \"of the system variable.\\n\" \"In that case it", "of types from Tcl command, needs to be ordered , this is for", "]) # array of mandatory options for current Tcl command: required = {'name','outname'}", "structured help for current command, args needs to be ordered help = {", "an argument use first letter or first letters from the name \" \"of", "value and 'set_sys system variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples':", "= { 'main': \"Returns the list of the names of system variables.\\n\" \"Without", "str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return str(ret_val)", "system variable.\\n\" \"In that case it will list only the system variables that", "needs to be ordered , this is for options like -optionname value option_types", "Dictionary of types from Tcl command, needs to be ordered , this is", "<NAME> (c) # # Date: 8/17/2019 # # MIT Licence # # ##########################################################", "value option_types = collections.OrderedDict([ ]) # array of mandatory options for current Tcl", "# Date: 8/17/2019 # # MIT Licence # # ########################################################## from tclCommands.TclCommand import", "collections.OrderedDict([ ('selection', str), ]) # Dictionary of types from Tcl command, needs to", "the list of the names of system variables.\\n\" \"Without an argument it will", "variables.\") # Dictionary of types from Tcl command, needs to be ordered arg_names", "of the names of system variables.\\n\" \"Without an argument it will list all", "or cncjob or global.\\n\" \"Note: Use 'get_sys system variable' to get the value", "the value 
and 'set_sys system variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]),", "in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return str(ret_val) # return str([*self.app.defaults])", "value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber',", "%s' % (\"--\", \"Outputs in Tcl Shell the list with the names of", "of mandatory options for current Tcl command: required = {'name','outname'} required = []", "with: gerber or excellon or geometry or cncjob or global.\\n\" \"Note: Use 'get_sys", "\"of the system variable.\\n\" \"In that case it will list only the system", "'list_sys gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\" :param args: :param", "Tcl command: required = {'name','outname'} required = [] # structured help for current", "for current Tcl command: required = {'name','outname'} required = [] # structured help", "cncj'] } def execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\"", "the list with the names of system variables.\") # Dictionary of types from", "of types from Tcl command, needs to be ordered arg_names = collections.OrderedDict([ ('selection',", "shell command to get the list of system variables example: list_sys \"\"\" #", "excellon or geometry or cncjob or global.\\n\" \"Note: Use 'get_sys system variable' to", "\"In that case it will list only the system variables that starts with", "# MIT Licence # # ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\"", "command: required = {'name','outname'} required = [] # structured help for current command,", "Tcl command, needs to be ordered arg_names = collections.OrderedDict([ ('selection', str), ]) #", "options for current Tcl command: required = {'name','outname'} required = [] # structured", "the names of system variables.\\n\" \"Without an argument it will list all the", "'main': \"Returns the list of the names of system variables.\\n\" \"Without an argument", "the system variable.\\n\" \"In that case it will list only the system variables", "global.\\n\" \"Note: Use 'get_sys system variable' to get the value and 'set_sys system", "backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description = '%s %s' %", "'selection' in args: argument = args['selection'] return str([k for k in self.app.defaults.keys() if", "# # Date: 8/17/2019 # # MIT Licence # # ########################################################## from tclCommands.TclCommand", "is for options like -optionname value option_types = collections.OrderedDict([ ]) # array of", "list with the names of system variables.\") # Dictionary of types from Tcl", "Post-processing for Manufacturing # # File Author: <NAME> (c) # # Date: 8/17/2019", "that case it will list only the system variables that starts with that", "Use 'get_sys system variable' to get the value and 'set_sys system variable value'", "list all the system parameters. 
\" \"As an argument use first letter or", "be ordered , this is for options like -optionname value option_types = collections.OrderedDict([", "list_sys \"\"\" # List of all command aliases, to be able use old", "starts with that string.\\n\" \"Main categories start with: gerber or excellon or geometry", "system variable' to get the value and 'set_sys system variable value' to set", "\"As an argument use first letter or first letters from the name \"", "categories start with: gerber or excellon or geometry or cncjob or global.\\n\" \"Note:", "Tcl shell command to get the list of system variables example: list_sys \"\"\"", "gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args:", "from Tcl command, needs to be ordered , this is for options like", "help for current command, args needs to be ordered help = { 'main':", "command, needs to be ordered , this is for options like -optionname value", "only the system variables that starts with that string.\\n\" \"Main categories start with:", "ordered help = { 'main': \"Returns the list of the names of system", "to be ordered help = { 'main': \"Returns the list of the names", "use old names for backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description", "= collections.OrderedDict([ ('selection', str), ]) # Dictionary of types from Tcl command, needs", "array of mandatory options for current Tcl command: required = {'name','outname'} required =", "unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\" if 'selection' in args: argument", "collections.OrderedDict([ ]) # array of mandatory options for current Tcl command: required =", "or geometry or cncjob or global.\\n\" \"Note: Use 'get_sys system variable' to get", "# array of mandatory options for current Tcl command: required = {'name','outname'} required", "use first letter or first letters from the name \" \"of the system", "and 'set_sys system variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys',", "arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary of types from Tcl command,", "variables that starts with that string.\\n\" \"Main categories start with: gerber or excellon", "<filename>tclCommands/TclCommandListSys.py # ########################################################## # FlatCAM: 2D Post-processing for Manufacturing # # File Author:", "# structured help for current command, args needs to be ordered help =", "name \" \"of the system variable.\\n\" \"In that case it will list only", "it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] }", "args: :param unnamed_args: :return: \"\"\" if 'selection' in args: argument = args['selection'] return", "'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def", "system parameters. \" \"As an argument use first letter or first letters from", "the system parameters. \" \"As an argument use first letter or first letters", "command, needs to be ordered arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary", "list only the system variables that starts with that string.\\n\" \"Main categories start", "it will list all the system parameters. 
\" \"As an argument use first", ":param args: :param unnamed_args: :return: \"\"\" if 'selection' in args: argument = args['selection']", "Date: 8/17/2019 # # MIT Licence # # ########################################################## from tclCommands.TclCommand import *", "tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the list", "variables example: list_sys \"\"\" # List of all command aliases, to be able", "Dictionary of types from Tcl command, needs to be ordered arg_names = collections.OrderedDict([", "(\"--\", \"Outputs in Tcl Shell the list with the names of system variables.\")", "argument it will list all the system parameters. \" \"As an argument use", "########################################################## # FlatCAM: 2D Post-processing for Manufacturing # # File Author: <NAME> (c)", "# # File Author: <NAME> (c) # # Date: 8/17/2019 # # MIT", "execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\" if 'selection' in", "the system variables that starts with that string.\\n\" \"Main categories start with: gerber", "[] # structured help for current command, args needs to be ordered help", "{ 'main': \"Returns the list of the names of system variables.\\n\" \"Without an", "def execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\" if 'selection'", "get the list of system variables example: list_sys \"\"\" # List of all", "['list_sys', 'listsys'] description = '%s %s' % (\"--\", \"Outputs in Tcl Shell the", "an argument it will list all the system parameters. \" \"As an argument", "will list all the system parameters. \" \"As an argument use first letter", "the name \" \"of the system variable.\\n\" \"In that case it will list", "of system variables.\") # Dictionary of types from Tcl command, needs to be", "this is for options like -optionname value option_types = collections.OrderedDict([ ]) # array", "= [] # structured help for current command, args needs to be ordered", "aliases, to be able use old names for backward compatibility (add_poly, add_polygon) aliases", "variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys", "in args: argument = args['selection'] return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))])", "k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return str(ret_val) # return", "types from Tcl command, needs to be ordered arg_names = collections.OrderedDict([ ('selection', str),", "]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self, args,", "letter or first letters from the name \" \"of the system variable.\\n\" \"In", "= ['list_sys', 'listsys'] description = '%s %s' % (\"--\", \"Outputs in Tcl Shell", "types from Tcl command, needs to be ordered , this is for options", "all the system parameters. 
\" \"As an argument use first letter or first", "# Dictionary of types from Tcl command, needs to be ordered , this", "or global.\\n\" \"Note: Use 'get_sys system variable' to get the value and 'set_sys", "system variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser',", "('selection', str), ]) # Dictionary of types from Tcl command, needs to be", "File Author: <NAME> (c) # # Date: 8/17/2019 # # MIT Licence #", "collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self,", "for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return str(ret_val) #", "for current command, args needs to be ordered help = { 'main': \"Returns", "start with: gerber or excellon or geometry or cncjob or global.\\n\" \"Note: Use", ", this is for options like -optionname value option_types = collections.OrderedDict([ ]) #", "= collections.OrderedDict([ ]) # array of mandatory options for current Tcl command: required", "that starts with that string.\\n\" \"Main categories start with: gerber or excellon or", "unnamed_args: :return: \"\"\" if 'selection' in args: argument = args['selection'] return str([k for", "\"Main categories start with: gerber or excellon or geometry or cncjob or global.\\n\"", "string.\\n\" \"Main categories start with: gerber or excellon or geometry or cncjob or", "set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj']", "system variables example: list_sys \"\"\" # List of all command aliases, to be", "that string.\\n\" \"Main categories start with: gerber or excellon or geometry or cncjob", "['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\"", "} def execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\" if", "args['selection'] return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys())", "needs to be ordered arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary of", "options like -optionname value option_types = collections.OrderedDict([ ]) # array of mandatory options", "gerber or excellon or geometry or cncjob or global.\\n\" \"Note: Use 'get_sys system", "(c) # # Date: 8/17/2019 # # MIT Licence # # ########################################################## from", "command to get the list of system variables example: list_sys \"\"\" # List", "will list only the system variables that starts with that string.\\n\" \"Main categories", "2D Post-processing for Manufacturing # # File Author: <NAME> (c) # # Date:", "example: list_sys \"\"\" # List of all command aliases, to be able use", "import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the list of", "'set_sys system variable value' to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys", "or first letters from the name \" \"of the system variable.\\n\" \"In that", "mandatory options for current Tcl command: required = {'name','outname'} required = [] #", "\"Returns the list of the names of system variables.\\n\" \"Without an argument it", "system variables.\\n\" \"Without an argument it will list all the system parameters. 
\"", "cncjob or global.\\n\" \"Note: Use 'get_sys system variable' to get the value and", "\"\"\" :param args: :param unnamed_args: :return: \"\"\" if 'selection' in args: argument =", "= {'name','outname'} required = [] # structured help for current command, args needs", "Licence # # ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell", "be ordered help = { 'main': \"Returns the list of the names of", "ordered , this is for options like -optionname value option_types = collections.OrderedDict([ ])", "ordered arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary of types from Tcl", "to be ordered , this is for options like -optionname value option_types =", "get the value and 'set_sys system variable value' to set it.\\n\", 'args': collections.OrderedDict([", "for backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description = '%s %s'", "# # MIT Licence # # ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand):", "'get_sys system variable' to get the value and 'set_sys system variable value' to", "add_polygon) aliases = ['list_sys', 'listsys'] description = '%s %s' % (\"--\", \"Outputs in", "required = [] # structured help for current command, args needs to be", "help = { 'main': \"Returns the list of the names of system variables.\\n\"", "argument use first letter or first letters from the name \" \"of the", "from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the", "system variables.\") # Dictionary of types from Tcl command, needs to be ordered", "aliases = ['list_sys', 'listsys'] description = '%s %s' % (\"--\", \"Outputs in Tcl", "current Tcl command: required = {'name','outname'} required = [] # structured help for", "to set it.\\n\", 'args': collections.OrderedDict([ ]), 'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys", "= args['selection'] return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val =", "argument = args['selection'] return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val", "TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the list of system variables example:", "FlatCAM: 2D Post-processing for Manufacturing # # File Author: <NAME> (c) # #", "names for backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description = '%s", "required = {'name','outname'} required = [] # structured help for current command, args", "for options like -optionname value option_types = collections.OrderedDict([ ]) # array of mandatory", "'listsys'] description = '%s %s' % (\"--\", \"Outputs in Tcl Shell the list", "it will list only the system variables that starts with that string.\\n\" \"Main", "to be ordered arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary of types", "system variables that starts with that string.\\n\" \"Main categories start with: gerber or", "of all command aliases, to be able use old names for backward compatibility", "be ordered arg_names = collections.OrderedDict([ ('selection', str), ]) # Dictionary of types from", "for Manufacturing # # File Author: <NAME> (c) # # Date: 8/17/2019 #", "Manufacturing # # File Author: <NAME> (c) # # Date: 8/17/2019 # #", "names of system variables.\") # Dictionary of types from Tcl 
command, needs to", "option_types = collections.OrderedDict([ ]) # array of mandatory options for current Tcl command:", "# ########################################################## # FlatCAM: 2D Post-processing for Manufacturing # # File Author: <NAME>", "variable.\\n\" \"In that case it will list only the system variables that starts", "# # ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command", "case it will list only the system variables that starts with that string.\\n\"", "\"\"\" if 'selection' in args: argument = args['selection'] return str([k for k in", "]) # Dictionary of types from Tcl command, needs to be ordered ,", "needs to be ordered help = { 'main': \"Returns the list of the", "like -optionname value option_types = collections.OrderedDict([ ]) # array of mandatory options for", "list of the names of system variables.\\n\" \"Without an argument it will list", "of system variables.\\n\" \"Without an argument it will list all the system parameters.", "Shell the list with the names of system variables.\") # Dictionary of types", "\" \"of the system variable.\\n\" \"In that case it will list only the", "% (\"--\", \"Outputs in Tcl Shell the list with the names of system", "be able use old names for backward compatibility (add_poly, add_polygon) aliases = ['list_sys',", "return str([k for k in self.app.defaults.keys() if str(k).startswith(str(argument))]) else: ret_val = list(self.app.defaults.keys()) return", "compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description = '%s %s' % (\"--\",", "variables.\\n\" \"Without an argument it will list all the system parameters. \" \"As", "parameters. \" \"As an argument use first letter or first letters from the", "\"Note: Use 'get_sys system variable' to get the value and 'set_sys system variable", "\"\"\" Tcl shell command to get the list of system variables example: list_sys", "'examples': ['list_sys', 'list_sys ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args):", "ser', 'list_sys gerber', 'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\" :param args:", "first letters from the name \" \"of the system variable.\\n\" \"In that case", ":return: \"\"\" if 'selection' in args: argument = args['selection'] return str([k for k", "\"Without an argument it will list all the system parameters. 
\" \"As an", "the names of system variables.\") # Dictionary of types from Tcl command, needs", "{'name','outname'} required = [] # structured help for current command, args needs to", "\"\"\" # List of all command aliases, to be able use old names", "# File Author: <NAME> (c) # # Date: 8/17/2019 # # MIT Licence", "str), ]) # Dictionary of types from Tcl command, needs to be ordered", "command, args needs to be ordered help = { 'main': \"Returns the list", "to get the value and 'set_sys system variable value' to set it.\\n\", 'args':", "in Tcl Shell the list with the names of system variables.\") # Dictionary", "names of system variables.\\n\" \"Without an argument it will list all the system", "geometry or cncjob or global.\\n\" \"Note: Use 'get_sys system variable' to get the", "args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return: \"\"\" if 'selection' in args:", "# FlatCAM: 2D Post-processing for Manufacturing # # File Author: <NAME> (c) #", "of system variables example: list_sys \"\"\" # List of all command aliases, to", "first letter or first letters from the name \" \"of the system variable.\\n\"", "class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the list of system variables", "########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get", "old names for backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys'] description =", "list of system variables example: list_sys \"\"\" # List of all command aliases,", "List of all command aliases, to be able use old names for backward", "command aliases, to be able use old names for backward compatibility (add_poly, add_polygon)", "args needs to be ordered help = { 'main': \"Returns the list of", "'list_sys cncj'] } def execute(self, args, unnamed_args): \"\"\" :param args: :param unnamed_args: :return:", "\" \"As an argument use first letter or first letters from the name", "able use old names for backward compatibility (add_poly, add_polygon) aliases = ['list_sys', 'listsys']", "from the name \" \"of the system variable.\\n\" \"In that case it will", "8/17/2019 # # MIT Licence # # ########################################################## from tclCommands.TclCommand import * class", "* class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to get the list of system", "from Tcl command, needs to be ordered arg_names = collections.OrderedDict([ ('selection', str), ])", "Author: <NAME> (c) # # Date: 8/17/2019 # # MIT Licence # #", "all command aliases, to be able use old names for backward compatibility (add_poly,", "# List of all command aliases, to be able use old names for", "# ########################################################## from tclCommands.TclCommand import * class TclCommandListSys(TclCommand): \"\"\" Tcl shell command to", "if 'selection' in args: argument = args['selection'] return str([k for k in self.app.defaults.keys()", "to be able use old names for backward compatibility (add_poly, add_polygon) aliases =" ]