text
stringlengths
28
881k
from asyncio import FutureNEWLINEfrom math import ceilNEWLINEfrom unittest.mock import MagicMockNEWLINEfrom typing import Sequence, Dict, Optional, TypeNEWLINENEWLINEimport pytestNEWLINEfrom _pytest.monkeypatch import MonkeyPatchNEWLINEfrom marshmallow_jsonapi import fieldsNEWLINEfrom starlette.applications import StarletteNEWLINEfrom starlette.requests import URLNEWLINEfrom starlette.responses import ResponseNEWLINEfrom starlette.testclient import TestClientNEWLINENEWLINEfrom starlette_jsonapi import metaNEWLINEfrom starlette_jsonapi.resource import BaseResourceNEWLINEfrom starlette_jsonapi.pagination import (NEWLINE BasePagination, BasePageNumberPagination,NEWLINE BaseCursorPagination, BaseOffsetPaginationNEWLINE)NEWLINEfrom starlette_jsonapi.schema import JSONAPISchemaNEWLINENEWLINENEWLINEdef test_process_query_params_called_on_init(monkeypatch: MonkeyPatch):NEWLINE paginator = BasePagination(request=MagicMock(), data=[])NEWLINE assert paginator.process_query_params() is NoneNEWLINENEWLINE process_query_params_mock = MagicMock()NEWLINE monkeypatch.setattr(BasePagination, 'process_query_params', process_query_params_mock)NEWLINE BasePagination(request=MagicMock(), data=[])NEWLINE assert process_query_params_mock.calledNEWLINENEWLINENEWLINEdef test_unimplemented_slice_throws_error():NEWLINE class TPagination(BasePagination):NEWLINE passNEWLINENEWLINE paginator = TPagination(request=MagicMock(), data=[])NEWLINE with pytest.raises(NotImplementedError):NEWLINE paginator.get_pagination()NEWLINENEWLINENEWLINEdef test_unimplemented_generate_pagination_links():NEWLINE class TPagination(BasePagination):NEWLINE def slice_data(self, params: dict = None) -> Sequence:NEWLINE return self.dataNEWLINENEWLINE paginator = TPagination(request=MagicMock(), data=[1, 2, 3])NEWLINE data, links = paginator.get_pagination()NEWLINE assert links == {}NEWLINENEWLINENEWLINEdef test_base_page_number_pagination_process_query_params():NEWLINE # test initialization on specified valuesNEWLINE 
request = MagicMock()NEWLINE request.query_params = {'page[number]': 1, 'page[size]': 1}NEWLINE paginator = BasePageNumberPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_number == 1NEWLINE assert paginator.page_size == 1NEWLINENEWLINE # test initialization for default valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {}NEWLINE paginator = BasePageNumberPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_number == paginator.default_page_numberNEWLINE assert paginator.page_size == paginator.default_page_sizeNEWLINENEWLINE # test negative page falls back to defaultNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[number]': -1}NEWLINE paginator = BasePageNumberPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_number == paginator.default_page_numberNEWLINENEWLINENEWLINEdef test_base_page_number_pagination_create_pagination_link():NEWLINE url = URL('http://testserver/test-resource')NEWLINE request = MagicMock()NEWLINE request.url = urlNEWLINENEWLINE paginator = BasePageNumberPagination(request=request, data=[])NEWLINE link = paginator.create_pagination_link(page_number=2, page_size=4)NEWLINE assert link == 'http://testserver/test-resource?page%5Bnumber%5D=2&page%5Bsize%5D=4'NEWLINENEWLINENEWLINEdef test_base_offset_pagination_process_query_params():NEWLINE # test initialization on specified valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[offset]': 1, 'page[size]': 1}NEWLINE paginator = BaseOffsetPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_offset == 1NEWLINE assert paginator.page_size == 1NEWLINENEWLINE # test initialization for default valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {}NEWLINE paginator = BaseOffsetPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_offset == paginator.default_page_offsetNEWLINE assert paginator.page_size == 
paginator.default_page_sizeNEWLINENEWLINE # test negative offset falls back to defaultNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[offset]': -1}NEWLINE paginator = BaseOffsetPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_offset == paginator.default_page_offsetNEWLINENEWLINENEWLINEdef test_base_offset_pagination_create_pagination_link():NEWLINE url = URL('http://testserver/test-resource')NEWLINE request = MagicMock()NEWLINE request.url = urlNEWLINENEWLINE paginator = BaseOffsetPagination(request=request, data=[])NEWLINE link = paginator.create_pagination_link(page_offset=35, page_size=4)NEWLINE assert link == 'http://testserver/test-resource?page%5Boffset%5D=35&page%5Bsize%5D=4'NEWLINENEWLINENEWLINEdef test_base_cursor_pagination_process_query_params():NEWLINE # test initialization on specified valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {'page[after]': 2, 'page[before]': 4, 'page[size]': 1}NEWLINE paginator = BaseCursorPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_before == 4NEWLINE assert paginator.page_after == 2NEWLINE assert paginator.page_size == 1NEWLINENEWLINE # test initialization for default valuesNEWLINE request = MagicMock()NEWLINE request.query_params = {}NEWLINE paginator = BaseCursorPagination(request=request, data=[])NEWLINENEWLINE assert paginator.page_before == paginator.default_page_beforeNEWLINE assert paginator.page_after == paginator.default_page_afterNEWLINE assert paginator.page_size == paginator.default_page_sizeNEWLINENEWLINENEWLINEdef test_base_cursor_pagination_create_pagination_link():NEWLINE url = URL('http://testserver/test-resource')NEWLINE request = MagicMock()NEWLINE request.url = urlNEWLINENEWLINE paginator = BaseCursorPagination(request=request, data=[])NEWLINE link = paginator.create_pagination_link(page_after=2, page_before=6, page_size=4)NEWLINE assert link == 
'http://testserver/test-resource?page%5Bsize%5D=4&page%5Bafter%5D=2&page%5Bbefore%5D=6'NEWLINENEWLINENEWLINE@pytest.fixture()NEWLINEdef pagination_app(app: Starlette):NEWLINE class TPagination(BasePageNumberPagination):NEWLINE default_page_size = 2NEWLINENEWLINE def process_query_params(self):NEWLINE super(TPagination, self).process_query_params()NEWLINENEWLINE def slice_data(self, params: dict = None) -> Sequence:NEWLINE data = self.data[(self.page_number - 1) * self.page_size: self.page_number * self.page_size]NEWLINE return dataNEWLINENEWLINE def generate_pagination_links(self, params: dict = None) -> Dict[str, Optional[str]]:NEWLINE links = dict(first=None, next=None, prev=None, last=None) # type: Dict[str, Optional[str]]NEWLINE page_count = ceil(len(self.data) / self.page_size)NEWLINENEWLINE # firstNEWLINE links['first'] = self.create_pagination_link(page_number=1, page_size=self.page_size)NEWLINENEWLINE # lastNEWLINE links['last'] = self.create_pagination_link(page_number=page_count, page_size=self.page_size)NEWLINENEWLINE # nextNEWLINE has_next = self.page_number < page_countNEWLINE if has_next:NEWLINE links['next'] = self.create_pagination_link(page_number=self.page_number + 1, page_size=self.page_size)NEWLINENEWLINE # previousNEWLINE has_prev = self.page_number > 1NEWLINE if has_prev:NEWLINE links['prev'] = self.create_pagination_link(page_number=self.page_number - 1, page_size=self.page_size)NEWLINENEWLINE return linksNEWLINENEWLINE class TSchema(JSONAPISchema):NEWLINE id = fields.Str(dump_only=True)NEWLINE name = fields.Str()NEWLINENEWLINE class Meta:NEWLINE type_ = 'test-resource'NEWLINENEWLINE class TResource(BaseResource):NEWLINE type_ = 'test-resource'NEWLINE schema = TSchemaNEWLINE pagination_class = TPaginationNEWLINENEWLINE async def get_many(self, *args, **kwargs) -> Response:NEWLINE data = [NEWLINE dict(id=1, name='foo'),NEWLINE dict(id=2, name='foo'),NEWLINE dict(id=3, name='foo'),NEWLINE dict(id=4, name='foo')NEWLINE ]NEWLINE return await 
self.to_response(await self.serialize(data, many=True, paginate=True))NEWLINENEWLINE async def get(self, id=None, *args, **kwargs) -> Response:NEWLINE return await self.to_response(await self.serialize(dict(id=id, name='foo')))NEWLINENEWLINE async def post(self, *args, **kwargs) -> Response:NEWLINE return await self.to_response(await self.serialize(dict(id=id, name='foo')))NEWLINENEWLINE async def patch(self, id=None, *args, **kwargs) -> Response:NEWLINE return await self.to_response(await self.serialize(dict(id=id, name='foo')))NEWLINENEWLINE async def delete(self, id=None, *args, **kwargs) -> Response:NEWLINE return await self.to_response({})NEWLINENEWLINE TResource.register_routes(app, '/')NEWLINE return appNEWLINENEWLINENEWLINEdef test_get_many_calls_pagination(pagination_app: Starlette, monkeypatch: MonkeyPatch):NEWLINE test_client = TestClient(app=pagination_app)NEWLINE paginate_request_mock = MagicMock(return_value=Future())NEWLINENEWLINE object_list = [dict(id=1, name='foo')]NEWLINE links = {'first': 'first', 'next': 'next'}NEWLINE paginate_request_mock.return_value.set_result((object_list, links))NEWLINENEWLINE monkeypatch.setattr(BaseResource, 'paginate_request', paginate_request_mock)NEWLINE rv = test_client.get('/test-resource/')NEWLINE assert paginate_request_mock.called_with(object_list)NEWLINE assert rv.status_code == 200NEWLINE assert rv.json() == {NEWLINE 'data': [NEWLINE {NEWLINE 'id': '1',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE },NEWLINE ],NEWLINE 'links': {NEWLINE 'first': 'first',NEWLINE 'next': 'next'NEWLINE }NEWLINE }NEWLINENEWLINENEWLINEdef test_get_many_without_pagination_class(pagination_app: Starlette):NEWLINE resource = meta.registered_resources['TResource'] # type: Type[BaseResource]NEWLINE resource.pagination_class = NoneNEWLINE test_client = TestClient(app=pagination_app)NEWLINENEWLINE with pytest.raises(Exception) as exc:NEWLINE test_client.get('/test-resource/')NEWLINE assert 
str(exc.value) == 'Pagination class must be defined to use pagination'NEWLINENEWLINENEWLINEdef test_incorrect_request_type(pagination_app: Starlette, monkeypatch: MonkeyPatch):NEWLINE test_client = TestClient(app=pagination_app)NEWLINE paginate_request_mock = MagicMock(return_value=Future())NEWLINE paginate_request_mock.return_value.set_result(([], {}))NEWLINENEWLINE monkeypatch.setattr(BaseResource, 'paginate_request', paginate_request_mock)NEWLINE rv = test_client.get('/test-resource/1')NEWLINE assert rv.status_code == 200NEWLINE assert paginate_request_mock.not_calledNEWLINENEWLINE rv = test_client.post('/test-resource/', {})NEWLINE assert rv.status_code == 200NEWLINE assert paginate_request_mock.not_calledNEWLINENEWLINE rv = test_client.patch('/test-resource/1', {})NEWLINE assert rv.status_code == 200NEWLINE assert paginate_request_mock.not_calledNEWLINENEWLINE rv = test_client.delete('/test-resource/1', )NEWLINE assert rv.status_code == 200NEWLINE assert paginate_request_mock.not_calledNEWLINENEWLINENEWLINEdef test_specified_params(pagination_app: Starlette):NEWLINE test_client = TestClient(app=pagination_app)NEWLINENEWLINE # only size paramNEWLINE rv = test_client.get('/test-resource/?page[size]=1')NEWLINE assert rv.status_code == 200NEWLINE assert rv.json() == {NEWLINE 'data': [NEWLINE {NEWLINE 'id': '1',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE },NEWLINE ],NEWLINE 'links': {NEWLINE 'first': 'http://testserver/test-resource/?page%5Bnumber%5D=1&page%5Bsize%5D=1',NEWLINE 'next': 'http://testserver/test-resource/?page%5Bnumber%5D=2&page%5Bsize%5D=1',NEWLINE 'prev': None,NEWLINE 'last': 'http://testserver/test-resource/?page%5Bnumber%5D=4&page%5Bsize%5D=1',NEWLINE }NEWLINE }NEWLINENEWLINE # page and size paramNEWLINE rv = test_client.get('/test-resource/?page[number]=3&page[size]=1')NEWLINE assert rv.status_code == 200NEWLINE assert rv.json() == {NEWLINE 'data': [NEWLINE {NEWLINE 'id': '3',NEWLINE 'type': 
'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE },NEWLINE ],NEWLINE 'links': {NEWLINE 'first': 'http://testserver/test-resource/?page%5Bnumber%5D=1&page%5Bsize%5D=1',NEWLINE 'next': 'http://testserver/test-resource/?page%5Bnumber%5D=4&page%5Bsize%5D=1',NEWLINE 'prev': 'http://testserver/test-resource/?page%5Bnumber%5D=2&page%5Bsize%5D=1',NEWLINE 'last': 'http://testserver/test-resource/?page%5Bnumber%5D=4&page%5Bsize%5D=1',NEWLINE }NEWLINE }NEWLINENEWLINENEWLINEdef test_default_value_enforcement(pagination_app: Starlette):NEWLINE test_client = TestClient(app=pagination_app)NEWLINENEWLINE rv = test_client.get('/test-resource/')NEWLINE assert rv.status_code == 200NEWLINE assert rv.json() == {NEWLINE 'data': [NEWLINE {NEWLINE 'id': '1',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE },NEWLINE {NEWLINE 'id': '2',NEWLINE 'type': 'test-resource',NEWLINE 'attributes': {NEWLINE 'name': 'foo'NEWLINE }NEWLINE }NEWLINE ],NEWLINE 'links': {NEWLINE 'first': 'http://testserver/test-resource/?page%5Bnumber%5D=1&page%5Bsize%5D=2',NEWLINE 'next': 'http://testserver/test-resource/?page%5Bnumber%5D=2&page%5Bsize%5D=2',NEWLINE 'prev': None,NEWLINE 'last': 'http://testserver/test-resource/?page%5Bnumber%5D=2&page%5Bsize%5D=2',NEWLINE }NEWLINE }NEWLINE
# UniBorg (telegram userbot)NEWLINE# Copyright (C) 2020 The AuthorsNEWLINENEWLINE# This program is free software: you can redistribute it and/or modifyNEWLINE# it under the terms of the GNU Affero General Public License as published byNEWLINE# the Free Software Foundation, either version 3 of the License, orNEWLINE# (at your option) any later version.NEWLINENEWLINE# This program is distributed in the hope that it will be useful,NEWLINE# but WITHOUT ANY WARRANTY; without even the implied warranty ofNEWLINE# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See theNEWLINE# GNU Affero General Public License for more details.NEWLINENEWLINE# DeezLoader is an attempt to archive songs andNEWLINE# to serve the poor who can't afford legal copy of the songs.NEWLINE# If you are capable of buying andNEWLINE# spending money on songs in legal ways, please do so.NEWLINENEWLINE# The Author(s) of this module are not responsibleNEWLINE# for the usage of this program by other people.NEWLINENEWLINE# The Author(s) of this module do not recommendNEWLINE# doing it illegally or against Deezer's Terms of ServiceNEWLINENEWLINE# You should have received a copy of the GNU Affero General Public LicenseNEWLINE# along with this program. 
If not, see <https://www.gnu.org/licenses/>.NEWLINENEWLINE# requires: deezloader hachoir PillowNEWLINE# Ported from UniBorg by AnggaR96sNEWLINENEWLINEimport osNEWLINEimport shutilNEWLINEimport timeNEWLINENEWLINEimport deezloaderNEWLINEfrom hachoir.metadata import extractMetadataNEWLINEfrom hachoir.parser import createParserNEWLINEfrom telethon.tl.types import DocumentAttributeAudioNEWLINENEWLINEfrom userbot import DEEZER_ARL_TOKEN, TEMP_DOWNLOAD_DIRECTORYNEWLINEfrom userbot.events import registerNEWLINENEWLINENEWLINE@register(outgoing=True,NEWLINE pattern=r"^\.deez (.+?|) (FLAC|MP3\_320|MP3\_256|MP3\_128)")NEWLINEasync def _(event):NEWLINE """DeezLoader by @An0nimiaNEWLINE Ported for UniBorg by @SpEcHlDe"""NEWLINE if event.fwd_from:NEWLINE returnNEWLINENEWLINE strings = {NEWLINE "name": "DeezLoad",NEWLINE "arl_token_cfg_doc": "ARL Token for Deezer",NEWLINE "invalid_arl_token": "please set the required variables for this module",NEWLINE "wrong_cmd_syntax": "bruh, now i think how far should we go. 
please terminate my Session.",NEWLINE "server_error": "We're experiencing technical difficulties.",NEWLINE "processing": "`Downloading...`",NEWLINE "uploading": "`Uploading...`",NEWLINE }NEWLINENEWLINE ARL_TOKEN = DEEZER_ARL_TOKENNEWLINENEWLINE if ARL_TOKEN is None:NEWLINE await event.edit(strings["invalid_arl_token"])NEWLINE returnNEWLINENEWLINE try:NEWLINE loader = deezloader.Login(ARL_TOKEN)NEWLINE except Exception as er:NEWLINE await event.edit(str(er))NEWLINE returnNEWLINENEWLINE temp_dl_path = os.path.join(TEMP_DOWNLOAD_DIRECTORY, str(time.time()))NEWLINE if not os.path.exists(temp_dl_path):NEWLINE os.makedirs(temp_dl_path)NEWLINENEWLINE required_link = event.pattern_match.group(1)NEWLINE required_qty = event.pattern_match.group(2)NEWLINENEWLINE await event.edit(strings["processing"])NEWLINENEWLINE if "spotify" in required_link:NEWLINE if "track" in required_link:NEWLINE required_track = loader.download_trackspo(NEWLINE required_link,NEWLINE output=temp_dl_path,NEWLINE quality=required_qty,NEWLINE recursive_quality=True,NEWLINE recursive_download=True,NEWLINE not_interface=True,NEWLINE )NEWLINE await event.edit(strings["uploading"])NEWLINE await upload_track(required_track, event)NEWLINE shutil.rmtree(temp_dl_path)NEWLINE await event.delete()NEWLINENEWLINE elif "album" in required_link:NEWLINE reqd_albums = loader.download_albumspo(NEWLINE required_link,NEWLINE output=temp_dl_path,NEWLINE quality=required_qty,NEWLINE recursive_quality=True,NEWLINE recursive_download=True,NEWLINE not_interface=True,NEWLINE zips=False,NEWLINE )NEWLINE await event.edit(strings["uploading"])NEWLINE for required_track in reqd_albums:NEWLINE await upload_track(required_track, event)NEWLINE shutil.rmtree(temp_dl_path)NEWLINE await event.delete()NEWLINENEWLINE elif "deezer" in required_link:NEWLINE if "track" in required_link:NEWLINE required_track = loader.download_trackdee(NEWLINE required_link,NEWLINE output=temp_dl_path,NEWLINE quality=required_qty,NEWLINE 
recursive_quality=True,NEWLINE recursive_download=True,NEWLINE not_interface=True,NEWLINE )NEWLINE await event.edit(strings["uploading"])NEWLINE await upload_track(required_track, event)NEWLINE shutil.rmtree(temp_dl_path)NEWLINE await event.delete()NEWLINENEWLINE elif "album" in required_link:NEWLINE reqd_albums = loader.download_albumdee(NEWLINE required_link,NEWLINE output=temp_dl_path,NEWLINE quality=required_qty,NEWLINE recursive_quality=True,NEWLINE recursive_download=True,NEWLINE not_interface=True,NEWLINE zips=False,NEWLINE )NEWLINE await event.edit(strings["uploading"])NEWLINE for required_track in reqd_albums:NEWLINE await upload_track(required_track, event)NEWLINE shutil.rmtree(temp_dl_path)NEWLINE await event.delete()NEWLINENEWLINE else:NEWLINE await event.edit(strings["wrong_cmd_syntax"])NEWLINENEWLINENEWLINEasync def upload_track(track_location, message):NEWLINE metadata = extractMetadata(createParser(track_location))NEWLINE duration = 0NEWLINE title = ""NEWLINE performer = ""NEWLINE if metadata.has("duration"):NEWLINE duration = metadata.get("duration").secondsNEWLINE if metadata.has("title"):NEWLINE title = metadata.get("title")NEWLINE if metadata.has("artist"):NEWLINE performer = metadata.get("artist")NEWLINE document_attributes = [NEWLINE DocumentAttributeAudio(NEWLINE duration=duration,NEWLINE voice=False,NEWLINE title=title,NEWLINE performer=performer,NEWLINE waveform=None,NEWLINE )NEWLINE ]NEWLINE supports_streaming = TrueNEWLINE force_document = FalseNEWLINE caption_rts = os.path.basename(track_location)NEWLINE await message.client.send_file(NEWLINE message.chat_id,NEWLINE track_location,NEWLINE caption=caption_rts,NEWLINE force_document=force_document,NEWLINE supports_streaming=supports_streaming,NEWLINE allow_cache=False,NEWLINE attributes=document_attributes,NEWLINE )NEWLINE os.remove(track_location)NEWLINE
from .city import City NEWLINENEWLINE# base link: Raw Data: https://data.detroitmi.gov/Public-Safety/DPD-911-Calls-for-Service-September-20-2016-Presen/wgv9-drfcNEWLINE# raw data : https://www.dallasopendata.com/api/views/qv6i-rri7/rows.csv?accessType=DOWNLOADNEWLINENEWLINEclass Detroit(City):NEWLINE NEWLINE BASE_NAME = 'Detroit'NEWLINE DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'NEWLINE# DATE_FORMAT = '%m/%d/%Y %H:%M:%S %p'NEWLINENEWLINE DATA_URLS = {NEWLINE 'all': "https://opendata.arcgis.com/datasets/2dab2f70653f4bb8b4f2b51619ec8329_0.csv"NEWLINE }NEWLINE NEWLINE USE_YEARS = [2016,2017,2018]NEWLINENEWLINE COLUMN_TYPES = {NEWLINE 'intaketime' : str,NEWLINE 'dispatchtime' : str,NEWLINE 'traveltime':str,NEWLINE 'totalresponsetime' :str, NEWLINE 'time_on_scene' :str,NEWLINE 'totaltime':str, NEWLINE 'totaltime' : strNEWLINE }NEWLINENEWLINE COLUMN_TRANSFORMS ={NEWLINE 'call_timestamp' : 'date_time',NEWLINE }NEWLINENEWLINE COLUMN_MAPPINGS = {NEWLINE 'call_type' : 'calldescription',NEWLINE 'self_initiated' : 'officerinitiated',NEWLINE 'priority' : 'priority',NEWLINE 'beat':'precinct_sca',NEWLINE 'call_time' : 'call_timestamp',NEWLINE 'response_time' :'totalresponsetime'NEWLINE }NEWLINE NEWLINE BEAT_FILES = [NEWLINE {NEWLINE 'start_year' : 2016,NEWLINE 'end_year': 2019,NEWLINE 'path': 'geo_export_1a1f09d7-b148-4df7-8cd1-671ea3fbd22b.shp'NEWLINE }NEWLINE ]NEWLINE BEATS_IDS_GEOMETRY='area'NEWLINE NEWLINE RESPONSE_TIME_FACTOR = 60NEWLINE RESPONSE_TIME_COLUMN = 'response_time'NEWLINE# INPUT_CRS = {'init':'EPSG:26971'}NEWLINENEWLINE GEO_COLUMNS_REMAP = { 'latitude': 'lat' , 'longitude' : 'lng' }NEWLINE# GEO_UNIT_CONVERSION = 0.3048 #Feet to metersNEWLINENEWLINE def preprocess(self):NEWLINE print('using overloaded preprocess')NEWLINE beats = self.processed_data['beat']NEWLINE beats = beats.str.replace("[^0-9]", "").str.replace("^0","")NEWLINE self.processed_data['beat'] = beatsNEWLINE
#!/usr/bin/env python3NEWLINE# -*- coding: utf-8 -*-NEWLINENEWLINE# Copyright (c) 2015,2016 Jérémie DECOCK (http://www.jdhp.org)NEWLINENEWLINE# Permission is hereby granted, free of charge, to any person obtaining a copyNEWLINE# of this software and associated documentation files (the "Software"), to dealNEWLINE# in the Software without restriction, including without limitation the rightsNEWLINE# to use, copy, modify, merge, publish, distribute, sublicense, and/or sellNEWLINE# copies of the Software, and to permit persons to whom the Software isNEWLINE# furnished to do so, subject to the following conditions:NEWLINENEWLINE# The above copyright notice and this permission notice shall be included inNEWLINE# all copies or substantial portions of the Software.NEWLINENEWLINE# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS ORNEWLINE# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,NEWLINE# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THENEWLINE# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHERNEWLINE# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,NEWLINE# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS INNEWLINE# THE SOFTWARE.NEWLINENEWLINE"""NEWLINEA tiny module example.NEWLINE"""NEWLINENEWLINE__all__ = ['sing',NEWLINE 'gui']NEWLINENEWLINElyrics = """Row, row, row your boat,NEWLINEGently down the stream.NEWLINEMerrily, merrily, merrily, merrily,NEWLINELife is but a dream."""NEWLINENEWLINEdef sing():NEWLINE """NEWLINE The "Row, Row, Row Your Boat" nursery rhyme.NEWLINE """NEWLINENEWLINE print(lyrics)NEWLINENEWLINENEWLINEdef gui():NEWLINE import tkinter as tkNEWLINENEWLINE root = tk.Tk()NEWLINENEWLINE label = tk.Label(root, text=lyrics, justify=tk.CENTER)NEWLINE label.pack(pady=5)NEWLINENEWLINE root.mainloop()NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE sing()NEWLINE #gui()NEWLINENEWLINE
from io import BytesIONEWLINENEWLINEfrom PIL import ImageNEWLINEfrom flask import send_fileNEWLINENEWLINEfrom utils import httpNEWLINEfrom utils.endpoint import Endpoint, setupNEWLINENEWLINENEWLINE@setupNEWLINEclass Rip(Endpoint):NEWLINE params = ['avatar0']NEWLINENEWLINE def generate(self, avatars, text, usernames, kwargs):NEWLINE base = Image.open(self.assets.get('assets/rip/rip.bmp')).convert('RGBA').resize((642, 806))NEWLINE avatar = http.get_image(avatars[0]).resize((300, 300)).convert('RGBA')NEWLINENEWLINE base.paste(avatar, (175, 385), avatar)NEWLINE base = base.convert('RGBA')NEWLINENEWLINE b = BytesIO()NEWLINE base.save(b, format='png')NEWLINE b.seek(0)NEWLINE return send_file(b, mimetype='image/png')NEWLINE
"""netflixclone URL ConfigurationNEWLINENEWLINEThe `urlpatterns` list routes URLs to views. For more information please see:NEWLINE https://docs.djangoproject.com/en/1.11/topics/http/urls/NEWLINEExamples:NEWLINEFunction viewsNEWLINE 1. Add an import: from my_app import viewsNEWLINE 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')NEWLINEClass-based viewsNEWLINE 1. Add an import: from other_app.views import HomeNEWLINE 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')NEWLINEIncluding another URLconfNEWLINE 1. Import the include() function: from django.conf.urls import url, includeNEWLINE 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))NEWLINE"""NEWLINEfrom django.conf.urls import url, includeNEWLINEfrom django.contrib import adminNEWLINENEWLINEurlpatterns = [NEWLINE url(r'^admin/', admin.site.urls),NEWLINE url(r'', include('netflix.urls'))NEWLINE]NEWLINE
# Copyright 2020, The TensorFlow Authors.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE# Lint as: python3NEWLINE"""An example for using keras_evaluation."""NEWLINENEWLINEfrom absl import appNEWLINEfrom absl import flagsNEWLINENEWLINEimport numpy as npNEWLINEimport tensorflow as tfNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.data_structures import AttackTypeNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.data_structures import get_flattened_attack_metricsNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.data_structures import SlicingSpecNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.keras_evaluation import MembershipInferenceCallbackNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.keras_evaluation import run_attack_on_keras_modelNEWLINENEWLINENEWLINEFLAGS = flags.FLAGSNEWLINEflags.DEFINE_float('learning_rate', 0.02, 'Learning rate for training')NEWLINEflags.DEFINE_integer('batch_size', 250, 'Batch size')NEWLINEflags.DEFINE_integer('epochs', 100, 'Number of epochs')NEWLINEflags.DEFINE_string('model_dir', None, 'Model directory.')NEWLINEflags.DEFINE_bool('tensorboard_merge_classifiers', False, 'If true, plot 'NEWLINE 'different classifiers with the same slicing_spec and metric 'NEWLINE 'in the same figure.')NEWLINENEWLINENEWLINEdef 
small_cnn():NEWLINE """Setup a small CNN for image classification."""NEWLINE model = tf.keras.models.Sequential()NEWLINE model.add(tf.keras.layers.Input(shape=(32, 32, 3)))NEWLINENEWLINE for _ in range(3):NEWLINE model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='relu'))NEWLINE model.add(tf.keras.layers.MaxPooling2D())NEWLINENEWLINE model.add(tf.keras.layers.Flatten())NEWLINE model.add(tf.keras.layers.Dense(64, activation='relu'))NEWLINE model.add(tf.keras.layers.Dense(10))NEWLINE return modelNEWLINENEWLINENEWLINEdef load_cifar10():NEWLINE """Loads CIFAR10 data."""NEWLINE (x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()NEWLINENEWLINE x_train = np.array(x_train, dtype=np.float32) / 255NEWLINE x_test = np.array(x_test, dtype=np.float32) / 255NEWLINENEWLINE y_train = np.array(y_train, dtype=np.int32).squeeze()NEWLINE y_test = np.array(y_test, dtype=np.int32).squeeze()NEWLINENEWLINE return x_train, y_train, x_test, y_testNEWLINENEWLINENEWLINEdef main(unused_argv):NEWLINE # Load training and test data.NEWLINE x_train, y_train, x_test, y_test = load_cifar10()NEWLINENEWLINE # Get model, optimizer and specify loss.NEWLINE model = small_cnn()NEWLINE optimizer = tf.keras.optimizers.SGD(lr=FLAGS.learning_rate, momentum=0.9)NEWLINE loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)NEWLINE model.compile(optimizer=optimizer, loss=loss, metrics=['accuracy'])NEWLINENEWLINE # Get callback for membership inference attack.NEWLINE mia_callback = MembershipInferenceCallback(NEWLINE (x_train, y_train),NEWLINE (x_test, y_test),NEWLINE slicing_spec=SlicingSpec(entire_dataset=True, by_class=True),NEWLINE attack_types=[AttackType.THRESHOLD_ATTACK,NEWLINE AttackType.K_NEAREST_NEIGHBORS],NEWLINE tensorboard_dir=FLAGS.model_dir,NEWLINE tensorboard_merge_classifiers=FLAGS.tensorboard_merge_classifiers)NEWLINENEWLINE # Train model with KerasNEWLINE model.fit(NEWLINE x_train,NEWLINE y_train,NEWLINE epochs=FLAGS.epochs,NEWLINE 
validation_data=(x_test, y_test),NEWLINE batch_size=FLAGS.batch_size,NEWLINE callbacks=[mia_callback],NEWLINE verbose=2)NEWLINENEWLINE print('End of training attack:')NEWLINE attack_results = run_attack_on_keras_model(NEWLINE model, (x_train, y_train), (x_test, y_test),NEWLINE slicing_spec=SlicingSpec(entire_dataset=True, by_class=True),NEWLINE attack_types=[NEWLINE AttackType.THRESHOLD_ATTACK, AttackType.K_NEAREST_NEIGHBORSNEWLINE ])NEWLINE att_types, att_slices, att_metrics, att_values = get_flattened_attack_metrics(NEWLINE attack_results)NEWLINE print('\n'.join([' %s: %.4f' % (', '.join([s, t, m]), v) for t, s, m, v inNEWLINE zip(att_types, att_slices, att_metrics, att_values)]))NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE app.run(main)NEWLINE
#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEfrom neutron_lib import exceptions as q_excNEWLINENEWLINENEWLINEclass ArrayLBaaSv2DriverException(q_exc.NeutronException):NEWLINE """General Array LBaaSv2 Driver Exception."""NEWLINENEWLINE def __init__(self, message=None):NEWLINE if message:NEWLINE self.message = messageNEWLINENEWLINENEWLINEclass ArrayNoAttachedLoadbalancerException(ArrayLBaaSv2DriverException):NEWLINE """Exception thrown when an LBaaSv2 object has not parent Loadbalancer."""NEWLINENEWLINE message = "Entity has no associated loadbalancer"NEWLINENEWLINE def __str__(self):NEWLINE return self.messageNEWLINE
# Copyright The IETF Trust 2018, All Rights ReservedNEWLINE# -*- coding: utf-8 -*-NEWLINEfrom __future__ import unicode_literals, print_function, divisionNEWLINENEWLINEimport datetimeNEWLINEimport i18naddressNEWLINEimport lxmlNEWLINEimport osNEWLINEimport reNEWLINE#import sysNEWLINEimport sixNEWLINEimport unicodedataNEWLINEimport xml2rfcNEWLINENEWLINEfrom io import openNEWLINEfrom lxml.html import html_parserNEWLINEfrom lxml.html.builder import ElementMakerNEWLINENEWLINEif six.PY2:NEWLINE from urllib import urlopenNEWLINEelif six.PY3:NEWLINE from urllib.request import urlopenNEWLINENEWLINEtry:NEWLINE import debugNEWLINE debug.debug = TrueNEWLINEexcept ImportError:NEWLINE passNEWLINENEWLINEfrom xml2rfc import log, stringsNEWLINEfrom xml2rfc.writers.base import default_options, BaseV3WriterNEWLINEfrom xml2rfc.uniscripts import is_scriptNEWLINEfrom xml2rfc.util.date import extract_date, format_date, format_date_iso, get_expiry_dateNEWLINEfrom xml2rfc.util.name import ( full_author_name_expansion, short_author_role,NEWLINE ref_author_name_first, ref_author_name_last, NEWLINE short_author_name_set, full_author_name_set,NEWLINE short_org_name_set, full_org_name, )NEWLINEfrom xml2rfc.util.postal import ( get_normalized_address_info, address_hcard_properties,NEWLINE enhance_address_format, address_field_mapping, )NEWLINEfrom xml2rfc.util.unicode import expand_unicode_elementNEWLINEfrom xml2rfc.utils import namespaces, is_htmlblock, find_duplicate_html_ids, build_dataurlNEWLINENEWLINE#from xml2rfc import utilsNEWLINENEWLINE# ------------------------------------------------------------------------------NEWLINENEWLINEseen = set()NEWLINENEWLINEdef wrap(h, tag, **kwargs):NEWLINE w = build(tag, **kwargs)NEWLINE w.append(h)NEWLINE return wNEWLINENEWLINEdef slugify(s):NEWLINE s = s.strip().lower()NEWLINE s = re.sub(r'[^\w\s/|-]', '', s)NEWLINE s = re.sub(r'[-_\s/|]+', '-', s)NEWLINE s = s.strip('-')NEWLINE return sNEWLINENEWLINEdef maybefloat(f):NEWLINE try:NEWLINE return 
float(f)NEWLINE except (ValueError, TypeError):NEWLINE return NoneNEWLINENEWLINEdef wrap_ascii(tag, conj, name, ascii, role='', classes=None):NEWLINE role = ('','') if role in ['',None] else (', ', role)NEWLINE if ascii:NEWLINE e = build(tag,NEWLINE build.span(conj, name, classes='non-ascii'),NEWLINE ' (',NEWLINE build.span(ascii, classes='ascii'),NEWLINE ')',NEWLINE *role,NEWLINE classes=classesNEWLINE )NEWLINE else:NEWLINE e = build(tag, conj, name, *role, classes=classes)NEWLINE return eNEWLINENEWLINE#def wrap_ascii_div(NEWLINENEWLINENEWLINEclass ClassElementMaker(ElementMaker):NEWLINENEWLINE def __call__(self, tag, *children, **attrib):NEWLINE classes = attrib.pop('classes', None)NEWLINE attrib = dict( (k,v) for k,v in attrib.items() if v != None)NEWLINE elem = super(ClassElementMaker, self).__call__(tag, *children, **attrib)NEWLINE if classes:NEWLINE elem.set('class', classes)NEWLINE if is_htmlblock(elem) and (elem.tail is None or elem.tail.strip() == ''):NEWLINE elem.tail = '\n'NEWLINE return elemNEWLINEbuild = ClassElementMaker(makeelement=html_parser.makeelement)NEWLINENEWLINEclass ExtendingElementMaker(ClassElementMaker):NEWLINENEWLINE def __call__(self, tag, parent, precursor, *children, **attrib):NEWLINE elem = super(ExtendingElementMaker, self).__call__(tag, *children, **attrib)NEWLINE is_block = is_htmlblock(elem)NEWLINE #NEWLINE child = elemNEWLINE if precursor != None:NEWLINE pn = precursor.get('pn')NEWLINE sn = precursor.get('slugifiedName')NEWLINE an = precursor.get('anchor')NEWLINE if pn != None:NEWLINE elem.set('id', pn)NEWLINE if an != None and is_block:NEWLINE child = wrap(elem, 'div', id=an)NEWLINE elif sn != None:NEWLINE elem.set('id', sn)NEWLINE elif an != None:NEWLINE elem.set('id', an)NEWLINE if not elem.text or elem.text.strip() == '':NEWLINE elem.text = precursor.textNEWLINE elem.tail = precursor.tailNEWLINE if parent != None:NEWLINE parent.append(child)NEWLINE if is_block and (elem.tail is None or elem.tail.strip() == ''):NEWLINE 
elem.tail = '\n'NEWLINE if is_block and child != elem and (child.tail is None or child.tail.strip() == ''):NEWLINE child.tail = '\n'NEWLINE return elemNEWLINEadd = ExtendingElementMaker(makeelement=html_parser.makeelement)NEWLINENEWLINENEWLINEpilcrow = '\u00b6'NEWLINEmdash = '\u2014'NEWLINENEWLINE# ------------------------------------------------------------------------------NEWLINE# Address formatting functions, based on i18naddress functions, but rewritten toNEWLINE# produce html entities, rather than text lines.NEWLINENEWLINEdef _format_address_line(line_format, address, rules):NEWLINE def _get_field(name):NEWLINE field = []NEWLINE values = address.get(name, '')NEWLINE if isinstance(values, list):NEWLINE values = [ v for v in values if v ]NEWLINE if values:NEWLINE if isinstance(values, list):NEWLINE for value in values[:-1]:NEWLINE field.append(build.div(value, classes=address_hcard_properties[name]))NEWLINE field.append(build.span(values[-1], classes=address_hcard_properties[name]))NEWLINE else:NEWLINE span = NoneNEWLINE if name == 'name':NEWLINE role = address.get('role', '')NEWLINE if role:NEWLINE span = build.span(values,NEWLINE ' (',NEWLINE build.span(role, classes='role'),NEWLINE ')',NEWLINE classes=address_hcard_properties[name])NEWLINE if span == None:NEWLINE span = build.span(values, classes=address_hcard_properties[name])NEWLINE field.append(span)NEWLINE return fieldNEWLINENEWLINE replacements = {NEWLINE '%%%s' % code: _get_field(field_name)NEWLINE for code, field_name in address_field_mapping.items()}NEWLINENEWLINE field_entries = re.split('(%.)', line_format)NEWLINE fields = [ f for n in field_entries for f in replacements.get(n, n) ]NEWLINE return fieldsNEWLINENEWLINEdef format_address(address, latin=False):NEWLINE rules = i18naddress.get_validation_rules(address)NEWLINE address_format = (NEWLINE rules.address_latin_format if latin else rules.address_format)NEWLINE address_format = enhance_address_format(address, address_format)NEWLINE 
address_line_formats = address_format.split('%n')NEWLINE address_lines = [NEWLINE build.div(*_format_address_line(lf, address, rules), dir='auto')NEWLINE for lf in address_line_formats]NEWLINE address_lines = filter(lambda n: n!=None and ''.join(list(n.itertext())), address_lines)NEWLINE return address_linesNEWLINENEWLINEdef get_bidi_alignment(address):NEWLINE # We don't attempt to control the bidi layout in detail, but leave that toNEWLINE #the html layout engine; but want to know whether we have Right-to-left contentNEWLINE #in order to set the overall alignment of the address block.NEWLINE for field in address:NEWLINE line = address[field]NEWLINE if line:NEWLINE for ch in line:NEWLINE if isinstance(ch, six.text_type):NEWLINE dir = unicodedata.bidirectional(ch)NEWLINE if dir in ['R', 'AL']:NEWLINE return 'right'NEWLINE return 'left'NEWLINE NEWLINE# ------------------------------------------------------------------------------NEWLINENEWLINEclass HtmlWriter(BaseV3Writer):NEWLINENEWLINE def __init__(self, xmlrfc, quiet=None, options=default_options, date=datetime.date.today()):NEWLINE super(HtmlWriter, self).__init__(xmlrfc, quiet=quiet, options=options, date=date)NEWLINE self.anchor_tags = self.get_tags_with_anchor()NEWLINE self.duplicate_html_ids = set()NEWLINENEWLINE def get_tags_with_anchor(self):NEWLINE anchor_nodes = self.schema.xpath("//x:define/x:element//x:attribute[@name='anchor']", namespaces=namespaces)NEWLINE element_nodes = set()NEWLINE for a in anchor_nodes:NEWLINE for e in a.iterancestors():NEWLINE if e.tag.endswith('element'):NEWLINE element_nodes.add(e.get('name'))NEWLINE breakNEWLINE return element_nodesNEWLINENEWLINE def html_tree(self):NEWLINE if not self.root.get('prepTime'):NEWLINE prep = xml2rfc.PrepToolWriter(self.xmlrfc, options=self.options, date=self.options.date, liberal=True, keep_pis=[xml2rfc.V3_PI_TARGET])NEWLINE tree = prep.prep()NEWLINE self.tree = treeNEWLINE self.root = self.tree.getroot()NEWLINE html_tree = self.render(None, 
self.root)NEWLINE html_tree = self.post_process(html_tree)NEWLINE return html_treeNEWLINENEWLINE def html(self, html_tree=None):NEWLINE if html_tree is None:NEWLINE html_tree = self.html_tree()NEWLINE # 6.1. DOCTYPENEWLINE # NEWLINE # The DOCTYPE of the document is "html", which declares that theNEWLINE # document is compliant with HTML5. The document will start withNEWLINE # exactly this string:NEWLINE # NEWLINE # <!DOCTYPE html>NEWLINE html = lxml.etree.tostring(html_tree, method='html', encoding='unicode', pretty_print=True, doctype="<!DOCTYPE html>")NEWLINE html = re.sub(r'[\x00-\x09\x0B-\x1F]+', ' ', html)NEWLINE return htmlNEWLINENEWLINE def write(self, filename):NEWLINE self.filename = filenameNEWLINENEWLINE """Write the document to a file """NEWLINE html_tree = self.html_tree()NEWLINENEWLINE # Check for duplicate IDsNEWLINE dups = set(find_duplicate_html_ids(html_tree)) - self.duplicate_html_idsNEWLINE for attr, id, e in dups:NEWLINE self.warn(self.root[-1], 'Duplicate %s="%s" found in generated HTML.' 
% (attr, id, ))NEWLINENEWLINE if self.errors:NEWLINE log.write("Not creating output file due to errors (see above)")NEWLINE returnNEWLINENEWLINE # Use lxml's built-in serializationNEWLINE with open(filename, 'w', encoding='utf-8') as file:NEWLINE text = self.html(html_tree)NEWLINE file.write(text)NEWLINENEWLINE if not self.options.quiet:NEWLINE log.write('Created file', filename)NEWLINENEWLINENEWLINE def render(self, h, x):NEWLINE res = NoneNEWLINE if x.tag in (lxml.etree.PI, lxml.etree.Comment):NEWLINE tail = x.tail if x.tail and x.tail.strip() else ''NEWLINE if len(h):NEWLINE last = h[-1]NEWLINE last.tail = (last.tail or '') + tailNEWLINE else:NEWLINE h.text = (h.text or '') + tailNEWLINE else:NEWLINE func_name = "render_%s" % (x.tag.lower(),)NEWLINE func = getattr(self, func_name, None)NEWLINE if func == None:NEWLINE func = self.default_rendererNEWLINE if x.tag in self.__class__.deprecated_element_tags:NEWLINE self.warn(x, "Was asked to render a deprecated element: <%s>", (x.tag, ))NEWLINE elif not x.tag in seen:NEWLINE self.warn(x, "No renderer for <%s> found" % (x.tag, ))NEWLINE seen.add(x.tag)NEWLINE res = func(h, x)NEWLINE return resNEWLINENEWLINENEWLINE def default_renderer(self, h, x):NEWLINE hh = add(x.tag, h, x)NEWLINE for c in x.getchildren():NEWLINE self.render(hh, c)NEWLINE return hhNEWLINENEWLINE def skip_renderer(self, h, x):NEWLINE part = self.partNEWLINE for c in x.getchildren():NEWLINE self.part = partNEWLINE self.render(h, c)NEWLINENEWLINE def address_line_renderer(self, h, x, classes=None):NEWLINE if x.text:NEWLINE div = build.div(build.span(x.text.strip(), classes=classes))NEWLINE h.append(div)NEWLINE else:NEWLINE div = NoneNEWLINE return divNEWLINENEWLINE def inline_text_renderer(self, h, x):NEWLINE h.text = x.textNEWLINE for c in x.getchildren():NEWLINE self.render(h, c)NEWLINE h.tail = x.tailNEWLINENEWLINE def null_renderer(self, h, x):NEWLINE return NoneNEWLINENEWLINE def maybe_add_pilcrow(self, e):NEWLINE if 
len(e.xpath('.//*[@class="pilcrow"]')) == 0:NEWLINE id = e.get('id')NEWLINE if id:NEWLINE add.a(e, None, pilcrow, classes='pilcrow', href='#%s'%id)NEWLINE else:NEWLINE self.warn(e, 'Tried to add a pilcrow to <%s>, but found no "id" attribute' % e.tag)NEWLINENEWLINE # --- element rendering functions ------------------------------------------NEWLINENEWLINE def render_rfc(self, h, x):NEWLINE self.part = x.tagNEWLINE # 6.2. Root ElementNEWLINE # NEWLINE # The root element of the document is <html>. This element includes aNEWLINE # "lang" attribute, whose value is a language tag, as discussed inNEWLINE # [RFC5646], that describes the natural language of the document. TheNEWLINE # language tag to be included is "en". The class of the <html> elementNEWLINE # will be copied verbatim from the XML <rfc> element's <front>NEWLINE # element's <seriesInfo> element's "name" attributes (separated byNEWLINE # spaces; see Section 2.47.3 of [RFC7991]), allowing CSS to style RFCsNEWLINE # and Internet-Drafts differently from one another (if needed):NEWLINE # NEWLINE # <html lang="en" class="RFC">NEWLINENEWLINE classes = ' '.join( i.get('name') for i in x.xpath('./front/seriesInfo') )NEWLINE #NEWLINE html = h if h != None else build.html(classes=classes, lang='en')NEWLINE self.html_root = htmlNEWLINENEWLINE # 6.3. <head> ElementNEWLINE # NEWLINE # The root <html> will contain a <head> element that contains theNEWLINE # following elements, as needed.NEWLINENEWLINE head = add.head(html, None)NEWLINENEWLINE # 6.3.1. 
Charset DeclarationNEWLINE # NEWLINE # In order to be correctly processed by browsers that load the HTMLNEWLINE # using a mechanism that does not provide a valid content-type orNEWLINE # charset (such as from a local file system using a "file:" URL), theNEWLINE # HTML <head> element contains a <meta> element, whose "charset"NEWLINE # attribute value is "utf-8":NEWLINE # NEWLINE # <meta charset="utf-8">NEWLINENEWLINE add.meta(head, None, charset='utf-8')NEWLINE add.meta(head, None, name="scripts", content=x.get('scripts'))NEWLINENEWLINE # 6.3.2. Document TitleNEWLINE # NEWLINE # The contents of the <title> element from the XML source will beNEWLINE # placed inside an HTML <title> element in the header.NEWLINENEWLINE title = x.find('./front/title')NEWLINE text = title.textNEWLINE if self.options.rfc:NEWLINE text = ("RFC %s: " % self.root.get('number')) + textNEWLINE add.title(head, None, text)NEWLINENEWLINE # 6.3.3. Document MetadataNEWLINE # NEWLINE # The following <meta> elements will be included:NEWLINE # NEWLINE # o author - one each for the each of the "fullname"s andNEWLINE # "asciiFullname"s of all of the <author>s from the <front> of theNEWLINE # XML sourceNEWLINE for a in x.xpath('./front/author'):NEWLINE if not a.get('role') == 'contributor':NEWLINE name = full_author_name_expansion(a) or full_org_name(a)NEWLINE add.meta(head, None, name='author', content=name )NEWLINENEWLINE # o description - the <abstract> from the XML sourceNEWLINENEWLINE abstract = x.find('./front/abstract')NEWLINE if abstract != None:NEWLINE abstract_text = ' '.join(abstract.itertext())NEWLINE add.meta(head, None, name='description', content=abstract_text)NEWLINENEWLINE # o generator - the name and version number of the software used toNEWLINE # create the HTMLNEWLINENEWLINE generator = "%s %s" % (xml2rfc.NAME, xml2rfc.__version__)NEWLINE add.meta(head, None, name='generator', content=generator)NEWLINE NEWLINE # o keywords - comma-separated <keyword>s from the XML sourceNEWLINENEWLINE 
for keyword in x.xpath('./front/keyword'):NEWLINE add.meta(head, None, name='keyword', content=keyword.text)NEWLINENEWLINE # For example:NEWLINE # NEWLINE # <meta name="author" content="Joe Hildebrand">NEWLINE # <meta name="author" content="JOE HILDEBRAND">NEWLINE # <meta name="author" content="Heather Flanagan">NEWLINE # <meta name="description" content="This document defines...">NEWLINE # <meta name="generator" content="xmljade v0.2.4">NEWLINE # <meta name="keywords" content="html,css,rfc">NEWLINE # NEWLINE # Note: the HTML <meta> tag does not contain a closing slash.NEWLINE # NEWLINE # 6.3.4. Link to XML SourceNEWLINE # NEWLINE # The <head> element contains a <link> tag, with "rel" attribute ofNEWLINE # "alternate", "type" attribute of "application/rfc+xml", and "href"NEWLINE # attribute pointing to the prepared XML source that was used toNEWLINE # generate this document.NEWLINE # NEWLINE # <link rel="alternate" type="application/rfc+xml" href="source.xml">NEWLINENEWLINE add.link(head, None, rel='alternate', type='application/rfc+xml', href=self.xmlrfc.source)NEWLINENEWLINE # 6.3.5. Link to LicenseNEWLINE # NEWLINE # The <head> element contains a <link> tag, with "rel" attribute ofNEWLINE # "license" and "href" attribute pointing to the an appropriateNEWLINE # copyright license for the document.NEWLINE # NEWLINE # <link rel="license"NEWLINE # href="https://trustee.ietf.org/trust-legal-provisions.html">NEWLINENEWLINE add.link(head, None, rel='license', href="#copyright")NEWLINENEWLINE # 6.3.6. 
StyleNEWLINE # NEWLINE # The <head> element contains an embedded CSS in a <style> element.NEWLINE # The styles in the style sheet are to be set consistently betweenNEWLINE # documents by the RFC Editor, according to the best practices of theNEWLINE # day.NEWLINE # NEWLINE # To ensure consistent formatting, individual style attributes shouldNEWLINE # not be used in the main portion of the document.NEWLINE # NEWLINE # Different readers of a specification will desire different formattingNEWLINE # when reading the HTML versions of RFCs. To facilitate this, theNEWLINE # <head> element also includes a <link> to a style sheet in the sameNEWLINE # directory as the HTML file, named "rfc-local.css". Any formatting inNEWLINE # the linked style sheet will override the formatting in the includedNEWLINE # style sheet. For example:NEWLINE # NEWLINE # <style>NEWLINE # body {}NEWLINE # ...NEWLINE # </style>NEWLINE # <link rel="stylesheet" type="text/css" href="rfc-local.css">NEWLINENEWLINE cssin = self.options.css or os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data', 'xml2rfc.css')NEWLINE with open(cssin, encoding='utf-8') as f:NEWLINE css = f.read()NEWLINENEWLINE if self.options.external_css:NEWLINE cssout = os.path.join(os.path.dirname(self.filename), 'xml2rfc.css')NEWLINE with open(cssout, 'w', encoding='utf-8') as f:NEWLINE f.write(css)NEWLINE add.link(head, None, rel="stylesheet", href="xml2rfc.css", type="text/css")NEWLINE elif self.options.no_css:NEWLINE passNEWLINE else:NEWLINE add.style(head, None, css, type="text/css")NEWLINE add.link(head, None, rel="stylesheet", href="rfc-local.css", type="text/css")NEWLINENEWLINE # 6.3.7. LinksNEWLINE # NEWLINE # Each <link> element from the XML source is copied into the HTMLNEWLINE # header. Note: the HTML <link> element does not include a closingNEWLINE # slash.NEWLINENEWLINE for link in x.xpath('./link'):NEWLINE head.append(link)NEWLINENEWLINE body = add.body(html, None)NEWLINENEWLINE # 6.4. 
Page Headers and FootersNEWLINE # NEWLINE # In order to simplify printing by HTML renderers that implementNEWLINE # [W3C.WD-css3-page-20130314], a hidden HTML <table> tag of classNEWLINE # "ears" is added at the beginning of the HTML <body> tag, containingNEWLINE # HTML <thead> and <tfoot> tags, each of which contains an HTML <tr>NEWLINE # tag, which contains three HTML <td> tags with class "left", "center",NEWLINE # and "right", respectively.NEWLINE # NEWLINE # The <thead> corresponds to the top of the page, the <tfoot> to theNEWLINE # bottom. The string "[Page]" can be used as a placeholder for theNEWLINE # page number. In practice, this must always be in the <tfoot>'s rightNEWLINE # <td>, and no control of the formatting of the page number is implied.NEWLINE #NEWLINE # <table class="ears">NEWLINE # <thead>NEWLINE # <tr>NEWLINE # <td class="left">Internet-Draft</td>NEWLINE # <td class="center">HTML RFC</td>NEWLINE # <td class="right">March 2016</td>NEWLINE # </tr>NEWLINE # </thead>NEWLINE # <tfoot>NEWLINE # <tr>NEWLINE # <td class="left">Hildebrand</td>NEWLINE # <td class="center">Expires September 2, 2016</td>NEWLINE # <td class="right">[Page]</td>NEWLINE # </tr>NEWLINE # </tfoot>NEWLINE # </table>NEWLINENEWLINE body.append(NEWLINE build.table(NEWLINE build.thead(NEWLINE build.tr(NEWLINE build.td(self.page_top_left(), classes='left'),NEWLINE build.td(self.page_top_center(), classes='center'),NEWLINE build.td(self.page_top_right(), classes='right'),NEWLINE ),NEWLINE ),NEWLINE build.tfoot(NEWLINE build.tr(NEWLINE build.td(self.page_bottom_left(), classes='left'),NEWLINE build.td(self.page_bottom_center(), classes='center'),NEWLINE build.td("[Page]", classes='right'),NEWLINE ),NEWLINE ),NEWLINE classes='ears',NEWLINE )NEWLINE )NEWLINENEWLINE for c in [ e for e in [ x.find('front'), x.find('middle'), x.find('back') ] if e != None]:NEWLINE self.part = c.tagNEWLINE self.render(body, c)NEWLINENEWLINE jsin = self.options.css or 
os.path.join(os.path.dirname(os.path.dirname(__file__)), 'data', 'xml2rfc.js')NEWLINE with open(jsin, encoding='utf-8') as f:NEWLINE js = f.read()NEWLINE add.script(body, None, js)NEWLINENEWLINE return htmlNEWLINENEWLINE # 9.1. <abstract>NEWLINE # NEWLINE # The abstract is rendered in a similar fashion to a <section> withNEWLINE # anchor="abstract" and <name>Abstract</name>, but without a sectionNEWLINE # number.NEWLINE # NEWLINE # <section id="abstract">NEWLINE # <h2><a href="#abstract" class="selfRef">Abstract</a></h2>NEWLINE # <p id="s-abstract-1">This document defines...NEWLINE # <a href="#s-abstract-1" class="pilcrow">&para;</a>NEWLINE # </p>NEWLINE # </section>NEWLINE # NEWLINENEWLINE def render_abstract(self, h, x):NEWLINE if self.part == 'front':NEWLINE section = add.section(h, x)NEWLINE section.append( build.h2( build.a('Abstract', classes='selfRef', href="#abstract"), id="abstract"))NEWLINE for c in x.getchildren():NEWLINE self.render(section, c)NEWLINE return sectionNEWLINE else:NEWLINE return NoneNEWLINENEWLINE # 9.2. <address>NEWLINE # NEWLINE # This element is used in the Authors' Addresses section. It isNEWLINE # rendered as an HTML <address> tag of class "vcard". If none of theNEWLINE # descendant XML elements has an "ascii" attribute, the <address> HTMLNEWLINE # tag includes the HTML rendering of each of the descendant XMLNEWLINE # elements. 
Otherwise, the <address> HTML tag includes an HTML <div>NEWLINE # tag of class "ascii" (containing the HTML rendering of the ASCIINEWLINE # variants of each of the descendant XML elements), an HTML <div> tagNEWLINE # of class "alternative-contact", (containing the text "AlternateNEWLINE # contact information:"), and an HTML <div> tag of class "non-ascii"NEWLINE # (containing the HTML rendering of the non-ASCII variants of each ofNEWLINE # the descendant XML elements).NEWLINE # NEWLINE # Note: the following example shows some ASCII equivalents that are theNEWLINE # same as their nominal equivalents for clarity; normally, the ASCIINEWLINE # equivalents would not be included for these cases.NEWLINE # NEWLINE # <address class="vcard">NEWLINE # <div class="ascii">NEWLINE # <div class="nameRole"><span class="fn">Joe Hildebrand</span>NEWLINE # (<span class="role">editor</span>)</div>NEWLINE # <div class="org">Cisco Systems, Inc.</div>NEWLINE # </div>NEWLINE # <div class="alternative-contact">NEWLINE # Alternate contact information:NEWLINE # </div>NEWLINE # <div class="non-ascii">NEWLINE # <div class="nameRole"><span class="fn">Joe Hildebrand</span>NEWLINE # (<span class="role">editor</span>)</div>NEWLINE # <div class="org">Cisco Systems, Inc.</div>NEWLINE # </div>NEWLINE # </address>NEWLINENEWLINE ## The above text is reasonable for author name and org, but nonsense forNEWLINE ## the <address> element. The following text will be used:NEWLINE ##NEWLINE ## The <address> element will be rendered as a sequence of <div> elements,NEWLINE ## each corresponding to a child element of <address>. Element classesNEWLINE ## will be taken from hcard, as specified on http://microformats.org/wiki/hcardNEWLINE ## NEWLINE ## <address class="vcard">NEWLINE ##NEWLINE ## <!-- ... name, role, and organization elements ... 
-->NEWLINE ##NEWLINE ## <div class="adr">NEWLINE ## <div class="street-address">1 Main Street</div>NEWLINE ## <div class="street-address">Suite 1</div>NEWLINE ## <div class="city-region-code">NEWLINE ## <span class="city">Denver</span>,&nbsp;NEWLINE ## <span class="region">CO</span>&nbsp;NEWLINE ## <span class="postal-code">80202</span>NEWLINE ## </div>NEWLINE ## <div class="country-name">USA</div>NEWLINE ## </div>NEWLINE ## <div class="tel">NEWLINE ## <span>Phone:</span>NEWLINE ## <a class="tel" href="tel:+1-720-555-1212">+1-720-555-1212</a>NEWLINE ## </div>NEWLINE ## <div class="fax">NEWLINE ## <span>Fax:</span>NEWLINE ## <span class="tel">+1-303-555-1212</span>NEWLINE ## </div>NEWLINE ## <div class="email">NEWLINE ## <span>Email:</span>NEWLINE ## <a class="email" href="mailto:author@example.com">author@example.com</a>NEWLINE ## </div>NEWLINE ## </address>NEWLINE render_address = skip_rendererNEWLINENEWLINE # 9.3. <annotation>NEWLINE # NEWLINE # This element is rendered as the text ", " (a comma and a space)NEWLINE # followed by a <span> of class "annotation" at the end of aNEWLINE # <reference> element, the <span> containing appropriately transformedNEWLINE # elements from the children of the <annotation> tag.NEWLINE # NEWLINE # <span class="annotation">Some <em>thing</em>.</span>NEWLINE def render_annotation(self, h, x):NEWLINE span = add.span(h, x, classes='annotation')NEWLINE for c in x.getchildren():NEWLINE self.render(span, c)NEWLINE return spanNEWLINENEWLINE # 9.4. <area>NEWLINE # NEWLINE # Not currently rendered to HTML.NEWLINE # NEWLINENEWLINE def render_artset(self, h, x):NEWLINE preflist = ['svg', 'binary-art', 'ascii-art', ]NEWLINE for t in preflist:NEWLINE for a in x.xpath('./artwork[@type="%s"]' % t):NEWLINE artwork = self.render(h, a)NEWLINE return artworkNEWLINE else:NEWLINE artwork = self.render(h, x[0])NEWLINE return artworkNEWLINENEWLINE # 9.5. <artwork>NEWLINE # NEWLINE # Artwork can consist of either inline text or SVG. 
If the artwork isNEWLINE # not inside a <figure> element, a pilcrow (Section 5.2) is included.NEWLINE # Inside a <figure> element, the figure title serves the purpose of theNEWLINE # pilcrow. If the "align" attribute has the value "right", the CSSNEWLINE # class "alignRight" will be added. If the "align" attribute has theNEWLINE # value "center", the CSS class "alignCenter" will be added.NEWLINE def render_artwork(self, h, x):NEWLINE type = x.get('type')NEWLINE align = x.get('align', 'left')NEWLINENEWLINE # 9.5.1. Text ArtworkNEWLINE # NEWLINE # Text artwork is rendered inside an HTML <pre> element, which isNEWLINE # contained by a <div> element for consistency with SVG artwork. NoteNEWLINE # that CDATA blocks are not a part of HTML, so angle brackets andNEWLINE # ampersands (i.e., <, >, and &) must be escaped as &lt;, &gt;, andNEWLINE # &amp;, respectively.NEWLINE # NEWLINE # The <div> element will have CSS classes of "artwork", "art-text", andNEWLINE # "art-" prepended to the value of the <artwork> element's "type"NEWLINE # attribute, if it exists.NEWLINE # NEWLINE # <div class="artwork art-text art-ascii-art" id="s-1-2">NEWLINE # <pre>NEWLINE # ______________NEWLINE # &lt; hello, world &gt;NEWLINE # --------------NEWLINE # \ ^__^NEWLINE # \ (oo)\_______NEWLINE # (__)\ )\/\NEWLINE # ||----w |NEWLINE # || ||NEWLINE # </pre>NEWLINE # <a class="pilcrow" href="#s-1-2">&para;</a>NEWLINE # </div>NEWLINE if type not in ['svg', 'binary-art', ]:NEWLINE if not x.text or not x.text.strip():NEWLINE self.err(x, 'Expected ascii-art artwork for <artwork type="%s">, but found %s...' 
% (x.get('type',''), lxml.etree.tostring(x)[:128]))NEWLINE return NoneNEWLINE else:NEWLINE pre = build.pre(x.text.expandtabs())NEWLINE classes = 'artwork art-text align%s' % align.capitalize()NEWLINE if type and type != 'text':NEWLINE classes += ' art-%s' % typeNEWLINE div = add.div(h, x, pre, classes=classes)NEWLINE div.text = NoneNEWLINE if x.getparent().tag != 'figure':NEWLINE self.maybe_add_pilcrow(div)NEWLINE return divNEWLINE NEWLINE # 9.5.2. SVG ArtworkNEWLINE # NEWLINE # SVG artwork will be included inline. The SVG is wrapped in a <div>NEWLINE # element with CSS classes "artwork" and "art-svg".NEWLINE # NEWLINE # If the SVG "artwork" element is a child of <figure> and the artworkNEWLINE # is specified as align="right", an empty HTML <span> element is addedNEWLINE # directly after the <svg> element, in order to get right alignment toNEWLINE # work correctly in HTML rendering engines that do not support theNEWLINE # flex-box model.NEWLINE # NEWLINE # Note: the "alt" attribute of <artwork> is not currently used for SVG;NEWLINE # instead, the <title> and <desc> tags are used in the SVG.NEWLINE # NEWLINE # <div class="artwork art-svg" id="s-2-17">NEWLINE # <svg width="100" height="100" xmlns="http://www.w3.org/2000/svg">NEWLINE # <desc>Alt text here</desc>NEWLINE # <circleNEWLINE # cx="50" cy="50" r="40"NEWLINE # stroke="green" stroke-width="4" fill="yellow" />NEWLINE # </svg>NEWLINE # <a href="#s-2-17" class="pilcrow">&para;</a>NEWLINE # </div>NEWLINE elif type == 'svg':NEWLINE classes = 'artwork art-svg align%s' % align.capitalize()NEWLINE div = add.div(h, x, classes=classes)NEWLINE div.text = NoneNEWLINE src = x.get('src')NEWLINE if src:NEWLINE svgfile = x.get('originalSrc') or x.get('src')[:37]+' ...'NEWLINE if not src.startswith('data:'):NEWLINE self.err(x, "Internal error: Got an <artwork> src: attribute that did not start with 'data:' after prepping")NEWLINE try:NEWLINE f = urlopen(src)NEWLINE data = f.read()NEWLINE svg = 
lxml.etree.fromstring(data)NEWLINE except IOError as e:NEWLINE self.err(x, str(e))NEWLINE svg = NoneNEWLINE else:NEWLINE svg = x.find('svg:svg', namespaces=namespaces)NEWLINE svgfile = "inline:%s ..." % lxml.etree.tostring(svg)[:31]NEWLINE if svg == None:NEWLINE self.err(x, 'Expected <svg> content inside <artwork type="svg">, but did not find it:\n %s ...' % (lxml.etree.tostring(x)[:256], ))NEWLINE return NoneNEWLINE # For w3.org validator compatibilityNEWLINE if svg.get('attribute', None):NEWLINE del svg.attrib['version']NEWLINE #NEWLINE # Deal with possible svg scaling issues.NEWLINE vbox = svg.get('viewBox')NEWLINE svgw = maybefloat(svg.get('width'))NEWLINE svgh = maybefloat(svg.get('height'))NEWLINE try:NEWLINE if vbox:NEWLINE xo,yo,w,h = re.split(',? +', vbox.strip('()'))NEWLINE # rewrite viewbox in the simplest syntax, in case needed for pdf libNEWLINE svg.set('viewBox', '%s %s %s %s' % (xo,yo,w,h))NEWLINE if not (svgw and svgh):NEWLINE svgw = float(w)-float(xo)NEWLINE svgh = float(h)-float(yo)NEWLINE else:NEWLINE if svgw and svgh:NEWLINE svg.set('viewBox', '0 0 %s %s' % (svgw, svgh))NEWLINE else:NEWLINE self.err(x, "Cannot place SVG properly when neither viewBox nor width and height is available") NEWLINE return NoneNEWLINE except ValueError as e:NEWLINE self.err(x, "Error when calculating SVG size: %s" % e)NEWLINE imgw = 660 if self.options.image_svg else 724NEWLINE if imgw < svgw:NEWLINE svg.set('width', str(svgw/svgw*imgw))NEWLINE svg.set('height', str(svgh/svgw*imgw))NEWLINE #NEWLINE if self.options.image_svg:NEWLINE data = build_dataurl('image/svg+xml', lxml.etree.tostring(svg))NEWLINE add.img(div, None, src=data, alt=x.get('alt'))NEWLINE else:NEWLINE div.append(svg)NEWLINE if x.getparent().tag != 'figure':NEWLINE self.maybe_add_pilcrow(div)NEWLINE else:NEWLINE if x.get('align') == 'right':NEWLINE add.span(div, None)NEWLINE #NEWLINE dups = set(find_duplicate_html_ids(self.html_root))NEWLINE new = dups - self.duplicate_html_idsNEWLINE for attr, id, e in 
new:NEWLINE self.warn(x, 'Duplicate attribute %s="%s" found after including svg from %s. This can cause problems with some browsers.' % (attr, id, svgfile))NEWLINE self.duplicate_html_ids = self.duplicate_html_ids | dupsNEWLINENEWLINE # 9.5.3. Other ArtworkNEWLINE # NEWLINE # Other artwork will have a "src" attribute that uses the "data" URINEWLINE # scheme defined in [RFC2397]. Such artwork is rendered in an HTMLNEWLINE # <img> element. Note: the HTML <img> element does not have a closingNEWLINE # slash.NEWLINE # NEWLINE # Note: such images are not yet allowed in RFCs even though the formatNEWLINE # supports them. A limited set of "data:" mediatypes for artwork mayNEWLINE # be allowed in the future.NEWLINE # NEWLINE # <div class="artwork art-logo" id="s-2-58">NEWLINE # <img alt="IETF logo"NEWLINE # src="data:image/gif;charset=utf-8;base64,...">NEWLINE # <a class="pilcrow" href="#s-2-58">&para;</a>NEWLINE # </div>NEWLINE elif type == 'binary-art':NEWLINE div = add.div(h, x, classes='artwork art-svg')NEWLINE data = x.get('src')NEWLINE if data:NEWLINE del div.attrib['src']NEWLINE add.img(div, None, src=data)NEWLINE else:NEWLINE self.err(x, 'Expected <img> data given by src="" for <artwork type="binary-art">, but did not find it: %s ...' % (lxml.etree.tostring(x)[:128], ))NEWLINE if x.getparent().tag != 'figure':NEWLINE self.maybe_add_pilcrow(div)NEWLINENEWLINE # 9.6. <aside>NEWLINE # NEWLINE # This element is rendered as an HTML <aside> element, with all childNEWLINE # content appropriately transformed.NEWLINE # NEWLINE # <aside id="s-2.1-2">NEWLINE # <p id="s-2.1-2.1">NEWLINE # A little more than kin, and less than kind.NEWLINE # <a class="pilcrow" href="#s-2.1-2.1">&para;</a>NEWLINE # </p>NEWLINE # </aside>NEWLINE render_aside = default_rendererNEWLINE NEWLINENEWLINE # NEWLINE # 9.7. 
<author>
    # 
    # The <author> element is used in several places in the output.
    # Different rendering is used for each.
    def render_author(self, h, x):
        """Render an <author> element.

        Dispatches on self.part:
          * 'front'      -- short author line for the Document Information block
          * 'back'       -- full vCard-style block for Authors' Addresses
          * 'references' -- comma-separated author names inside a reference
        Returns the element appended to h, or None for an empty <author/>.
        """
        # An <author/> with no children and no attributes produces no output.
        if len(x)==0 and len(x.attrib)==0:
            return None

        # 9.7.1. Authors in Document Information
        # 
        # As seen in the Document Information at the beginning of the HTML,
        # each document author is rendered as an HTML <div> tag of class
        # "author".
        # 
        # Inside the <div class="author"> HTML tag, the author's initials and
        # surname (or the fullname, if it exists and the others do not) will be
        # rendered in an HTML <div> tag of class "author-name".  If the
        # <author> contains "asciiInitials" and "asciiSurname" attributes, or
        # contains as "asciiFullname" attribute, the author's name is rendered
        # twice, with the first being the non-ASCII version, wrapped in an HTML
        # <span> tag of class "non-ascii", followed by the ASCII version
        # wrapped in an HTML <span> tag of class "ascii", wrapped in
        # parentheses.  If the <author> has a "role" attribute of "editor", the
        # <div class="author-name"> will also contain the text ", " (comma,
        # space), followed by an HTML <span> tag of class "editor", which
        # contains the text "Ed.".
        # 
        # If the <author> element contains an <organization> element, it is
        # also rendered inside the <div class="author"> HTML tag.
        # 
        # <div class="author">
        #   <div class="author-name">
        #     H. Flanagan,
        #     <span class="editor">Ed.</span></div>
        #   <div class="org">Test Org</div>
        # </div>
        # <div class="author">
        #   <div class="author-name">
        #     <span class="non-ascii">Hildebrand</span>
        #     (<span class="ascii">HILDEBRAND</span>)
        #   </div>
        #   <div class="org">
        #     <span class="non-ascii">Test Org</span>
        #     (<span class="ascii">TEST ORG</span>)
        #   </div>
        # </div>
        if self.part == 'front':
            name, ascii = short_author_name_set(x)
            role = short_author_role(x)
            div = add.div(h, x, classes='author')
            if role:
                # Wrap the role text ("Ed.") in a span classed by the raw role
                # attribute value.
                role = build.span(role, classes=x.get('role'))
            if name:
                div.append(wrap_ascii('div', '', name, ascii, role, classes='author-name'))
            o = x.find('organization')
            if o != None and o.get('showOnFrontPage') == 'true':
                org, ascii = short_org_name_set(x)
                if org:
                    div.append(wrap_ascii('div', '', org, ascii, None, classes='org'))
            return div

        # 9.7.2. Authors of This Document
        # 
        # As seen in the Authors' Addresses section, at the end of the HTML,
        # each document author is rendered into an HTML <address> element with
        # the CSS class "vcard".
        # 
        # The HTML <address> element will contain an HTML <div> with CSS class
        # "nameRole".  That div will contain an HTML <span> element with CSS
        # class "fn" containing the value of the "fullname" attribute of the
        # <author> XML element and an HTML <span> element with CSS class "role"
        # containing the value of the "role" attribute of the <author> XML
        # element (if there is a role).  Parentheses will surround the <span
        # class="role">, if it exists.
        # 
        # <address class="vcard">
        #   <div class="nameRole">
        #     <span class="fn">Joe Hildebrand</span>
        #     (<span class="role">editor</span>)
        #   </div>
        #   ...
        # 
        # After the name, the <organization> and <address> child elements of
        # the author are rendered inside the HTML <address> tag.
        # 
        # When the <author> element, or any of its descendant elements, has any
        # attribute that starts with "ascii", all of the author information is
        # displayed twice.  The first version is wrapped in an HTML <div> tag
        # with class "ascii"; this version prefers the ASCII version of
        # information, such as "asciiFullname", but falls back on the non-ASCII
        # version if the ASCII version doesn't exist.  The second version is
        # wrapped in an HTML <div> tag with class "non-ascii"; this version
        # prefers the non-ASCII version of information, such as "fullname", but
        # falls back on the ASCII version if the non-ASCII version does not
        # exist.  Between these two HTML <div>s, a third <div> is inserted,
        # with class "alternative-contact", containing the text "Alternate
        # contact information:".
        # 
        # <address class="vcard">
        #   <div class="ascii">
        #     <div class="nameRole">
        #       <span class="fn">The ASCII name</span>
        #     </div>
        #   </div>
        #   <div class="alternative-contact">
        #     Alternate contact information:
        #   </div>
        #   <div class="non-ascii">
        #     <div class="nameRole">
        #       <span class="fn">The non-ASCII name</span>
        #       (<span class="role">editor</span>)
        #     </div>
        #   </div>
        # </address>
        elif self.part == 'back':
            # ascii will be set only if name has codepoints not in the Latin script blocks
            name, ascii = full_author_name_set(x)
            #
            addr = add.address(h, x, classes='vcard')
            #
            # Ensure an <address>/<postal> skeleton exists on the XML side:
            # the author name is rendered as part of <postal> below.
            address = x.find('./address')
            postal = x.find('./address/postal')
            if address is None:
                address = lxml.etree.Element('address')
                x.append(address)
            if postal is None:
                # We render author name as part of postal, so make sure it's there
                address.insert(0, lxml.etree.Element('postal'))
            if ascii:
                # Render everything twice: ASCII first, then the non-ASCII
                # original, separated by the alternative-contact divider.
                # NOTE(review): the divider text here ("Additional contact
                # information:") differs from the spec text quoted above
                # ("Alternate contact information:") -- confirm which is
                # intended before changing either.
                ascii_div = add.div(addr, None, classes='ascii')
                for c in x.getchildren():
                    self.render(ascii_div, c)
                add.div(addr, None, 'Additional contact information:', classes='alternative-contact')
                nonasc_div = add.div(addr, None, classes='non-ascii')
                for c in x.getchildren():
                    self.render(nonasc_div, c)
            else:
                for c in x.getchildren():
                    self.render(addr, c)
            return addr

        # 9.7.3. Authors of References
        # 
        # In the output generated from a reference element, author tags are
        # rendered inside an HTML <span> element with CSS class "refAuthor".
        # See Section 4.8.6.2 of [RFC7322] for guidance on how author names are
        # to appear.
        # 
        # <span class="refAuthor">Flanagan, H.</span> and
        # <span class="refAuthor">N. Brownlee</span>
        elif self.part == 'references':
            prev = x.getprevious()
            next = x.getnext()
            role = short_author_role(x)
            # The separator before each name depends on the author's position
            # in the sibling list (first / middle / last-of-many / second-of-two).
            if prev == None or prev.tag != 'author':
                # single author or the first author in a list
                name, ascii = ref_author_name_first(x)
                span = wrap_ascii('span', '', name, ascii, role, classes='refAuthor')
            elif prev != None and prev.tag == 'author' and next != None and next.tag == 'author':
                # not first and not last author in a list
                name, ascii = ref_author_name_first(x)
                span = wrap_ascii('span', ', ', name, ascii, role, classes='refAuthor')
            elif prev != None and prev.tag == 'author' and prev.getprevious() != None and prev.getprevious().tag == 'author':
                # last author in a list of authors
                name, ascii = ref_author_name_last(x)
                span = wrap_ascii('span', ', and ', name, ascii, role, classes='refAuthor')
            elif prev != None and prev.tag == 'author':
                # second author of two
                name, ascii = ref_author_name_last(x)
                span = wrap_ascii('span', ' and ', name, ascii, role, classes='refAuthor')
            else:
                # NOTE(review): if this branch is ever taken, `span` is unbound
                # and the h.append(span) below raises NameError -- the err()
                # call is assumed to be fatal or this state unreachable; confirm.
                self.err(x, "Internal error, unexpected state when rendering authors.")
            h.append(span)
            return span

        else:
            self.err(x, "Did not expect to be asked to render <%s> while in <%s>" % (x.tag, x.getparent().tag))

    # 9.8. <back>
    # 
    # If there is exactly one <references> child, render that child in a
    # similar way to a <section>. 
If there are more than one <references>
    # children, render as a <section> whose name is "References",
    # containing a <section> for each <references> child.
    # 
    # After any <references> sections, render each <section> child of
    # <back> as an appendix.
    # 
    # <section id="n-references">
    #   <h2 id="s-2">
    #     <a class="selfRef" href="#s-2">2.</a>
    #     <a class="selfRef" href="#n-references">References</a>
    #   </h2>
    #   <section id="n-normative">
    #     <h3 id="s-2.1">
    #       <a class="selfRef" href="#s-2.1">2.1.</a>
    #       <a class="selfRef" href="#n-normative">Normative</a>
    #     </h3>
    #     <dl class="reference"></dl>
    #   </section>
    #   <section id="n-informational">
    #     <h3 id="s-2.2">
    #       <a class="selfRef" href="#s-2.2">2.2.</a>
    #       <a class="selfRef" href="#n-informational">Informational</a>
    #     </h3>
    #     <dl class="reference"></dl>
    #   </section>
    # </section>
    # <section id="n-unimportant">
    #   <h2 id="s-A">
    #     <a class="selfRef" href="#s-A">Appendix A.</a>
    #     <a class="selfRef" href="#n-unimportant">Unimportant</a>
    #   </h2>
    # </section>
    # <back> produces no output of its own; only its children are rendered.
    render_back = skip_renderer

    # 9.9. <bcp14>
    # 
    # This element marks up words like MUST and SHOULD [BCP14] with an HTML
    # <span> element with the CSS class "bcp14".
    # 
    # You <span class="bcp14">MUST</span> be joking.
    def render_bcp14(self, h, x):
        """Render a <bcp14> keyword (MUST, SHOULD, ...) as <span class="bcp14">."""
        return add.span(h, x, classes='bcp14')

    # 9.10. <blockquote>
    # 
    # This element renders in a way similar to the HTML <blockquote>
    # element.  If there is a "cite" attribute, it is copied to the HTML
    # "cite" attribute.  If there is a "quoteFrom" attribute, it is placed
    # inside a <cite> element at the end of the quote, with an <a> element
    # surrounding it (if there is a "cite" attribute), linking to the cited
    # URL.
    # 
    # If the <blockquote> does not contain another element that gets a
    # pilcrow (Section 5.2), a pilcrow is added.
    # 
    # Note that the "&mdash;" at the beginning of the <cite> element should
    # be a proper emdash, which is difficult to show in the display of the
    # current format.
    # 
    # <blockquote id="s-1.2-1"
    #   cite="http://...">
    #   <p id="s-1.2-2">Four score and seven years ago our fathers
    #     brought forth on this continent, a new nation, conceived
    #     in Liberty, and dedicated to the proposition that all men
    #     are created equal.
    #     <a href="#s-1.2-2" class="pilcrow">&para;</a>
    #   </p>
    #   <cite>&mdash; <a href="http://...">Abraham Lincoln</a></cite>
    # </blockquote>
    def render_blockquote(self, h, x):
        """Render a <blockquote>, copying cite= and appending a trailing
        <cite>&mdash; quotedFrom</cite> attribution (linked when cite= exists).

        Note: the XML attribute read here is "quotedFrom" (RFC 7991), although
        the spec prose above abbreviates it as "quoteFrom".
        """
        frm = x.get('quotedFrom')
        cite = x.get('cite')
        quote = add.blockquote(h, x)
        if cite:
            quote.set('cite', cite)
        for c in x.getchildren():
            self.render(quote, c)
        self.maybe_add_pilcrow(quote)
        if frm:
            if cite:
                # Link the attribution text to the cited URL.
                frm = build.a(frm, href=cite)
            add.cite(quote, None, mdash, ' ', frm)
        return quote

    # 9.11. <boilerplate>
    # 
    # The Status of This Memo and the Copyright statement, together
    # commonly referred to as the document boilerplate, appear after the
    # Abstract. 
The children of the input <boilerplate> element areNEWLINE # treated in a similar fashion to unnumbered sections.NEWLINE # NEWLINE # <section id="status-of-this-memo">NEWLINE # <h2 id="s-boilerplate-1">NEWLINE # <a href="#status-of-this-memo" class="selfRef">NEWLINE # Status of this Memo</a>NEWLINE # </h2>NEWLINE # <p id="s-boilerplate-1-1">This Internet-Draft is submitted in fullNEWLINE # conformance with the provisions of BCP 78 and BCP 79.NEWLINE # <a href="#s-boilerplate-1-1" class="pilcrow">&para;</a>NEWLINE # </p>NEWLINE # ...NEWLINE render_boilerplate = skip_rendererNEWLINENEWLINE # 9.12. <br>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart. Note: inNEWLINE # HTML, <br> does not have a closing slash.NEWLINE ## Removed from schemaNEWLINE def render_br(self, h, x):NEWLINE return add.br(h, x)NEWLINENEWLINE # NEWLINE # 9.13. <city>NEWLINE # NEWLINE # This element is rendered as a <span> element with CSS classNEWLINE # "locality".NEWLINE # NEWLINE # <span class="locality">Guilford</span>NEWLINE def render_city(self, h, x):NEWLINE return self.address_line_renderer(h, x, classes='locality')NEWLINENEWLINE def render_cityarea(self, h, x):NEWLINE return self.address_line_renderer(h, x, classes='locality')NEWLINENEWLINE # NEWLINE # 9.14. <code>NEWLINE # NEWLINE # This element is rendered as a <span> element with CSS class "postal-NEWLINE # code".NEWLINE # NEWLINE # <span class="postal-code">GU16 7HF<span>NEWLINE def render_code(self, h, x):NEWLINE return self.address_line_renderer(h, x, classes='postal-code')NEWLINENEWLINE def render_sortingcode(self, h, x):NEWLINE return self.address_line_renderer(h, x, classes='postal-code')NEWLINENEWLINE # 9.15. 
<country>NEWLINE # NEWLINE # This element is rendered as a <div> element with CSS class "country-NEWLINE # name".NEWLINE # NEWLINE # <div class="country-name">England</div>NEWLINE def render_country(self, h, x):NEWLINE return self.address_line_renderer(h, x, classes='country-name')NEWLINENEWLINE # 9.16. <cref>NEWLINE # NEWLINE # This element is rendered as a <span> element with CSS class "cref".NEWLINE # Any anchor is copied to the "id" attribute. If there is a sourceNEWLINE # given, it is contained inside the "cref" <span> element with anotherNEWLINE # <span> element of class "crefSource".NEWLINE # NEWLINE # <span class="cref" id="crefAnchor">Just a brief commentNEWLINE # about something that we need to remember later.NEWLINE # <span class="crefSource">--life</span></span>NEWLINE def render_cref(self, h, x):NEWLINE span = add.span(h, x, classes='cref')NEWLINE disp = x.get('display') == 'true'NEWLINE if disp:NEWLINE for c in x.getchildren():NEWLINE self.render(span, c)NEWLINE source = x.get('source')NEWLINE if source:NEWLINE add.span(span, None, source, classes='crefSource')NEWLINE return spanNEWLINENEWLINE # 9.17. <date>NEWLINE # NEWLINE # This element is rendered as the HTML <time> element. 
If the "year",NEWLINE # "month", or "day" attribute is included on the XML element, anNEWLINE # appropriate "datetime" element will be generated in HTML.NEWLINE # NEWLINE # If this date is a child of the document's <front> element, it getsNEWLINE # the CSS class "published".NEWLINE # NEWLINE # If this date is inside a <reference> element, it gets the CSS classNEWLINE # "refDate".NEWLINE # NEWLINE # <time datetime="2014-10" class="published">October 2014</time>NEWLINE def render_date(self, h, x):NEWLINE parts = extract_date(x, self.options.date)NEWLINE text = format_date(*parts, legacy=self.options.legacy_date_format)NEWLINE datetime = format_date_iso(*parts)NEWLINE time = add.time(h, x, text, datetime=datetime)NEWLINE if x.getparent() == self.root.find('front'):NEWLINE time.set('class', 'published')NEWLINE return timeNEWLINENEWLINE # 9.18. <dd>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE def render_dd(self, h, x):NEWLINE indent = x.getparent().get('indent')NEWLINE style = 'margin-left: %.1fem' % (int(indent)*0.5) if indent else NoneNEWLINE dd = add.dd(h, x, style=style)NEWLINE for c in x.getchildren():NEWLINE self.render(dd, c)NEWLINE return ddNEWLINENEWLINE # 9.19. <displayreference>NEWLINE # NEWLINE # This element does not affect the HTML output, but it is used in theNEWLINE # generation of the <reference>, <referencegroup>, <relref>, and <xref>NEWLINE # elements.NEWLINE render_displayreference = null_rendererNEWLINENEWLINE # 9.20. 
<dl>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE # NEWLINE # If the hanging attribute is "false", add the "dlParallel" class, elseNEWLINE # add the "dlHanging" class.NEWLINE # NEWLINE # If the spacing attribute is "compact", add the "dlCompact" class.NEWLINE def render_dl(self, h, x):NEWLINE newline = x.get('newline')NEWLINE spacing = x.get('spacing')NEWLINE classes = []NEWLINE if newline == 'true':NEWLINE classes.append('dlNewline')NEWLINE elif newline == 'false':NEWLINE classes.append('dlParallel')NEWLINE if spacing == 'compact':NEWLINE classes.append('dlCompact')NEWLINE classes = ' '.join(classes)NEWLINE dl = add.dl(h, x, classes=classes)NEWLINE for c in x.getchildren():NEWLINE self.render(dl, c)NEWLINE return dlNEWLINENEWLINE # 9.21. <dt>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_dt = default_rendererNEWLINENEWLINE # NEWLINE # 9.22. <em>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_em = default_rendererNEWLINENEWLINE # 9.23. <email>NEWLINE # NEWLINE # This element is rendered as an HTML <div> containing the stringNEWLINE # "Email:" and an HTML <a> element with the "href" attribute set to theNEWLINE # equivalent "mailto:" URI, a CSS class of "email", and the contentsNEWLINE # set to the email address. If this is the version of the address withNEWLINE # ASCII, the "ascii" attribute is preferred to the element text.NEWLINE # NEWLINE # <div>NEWLINE # <span>Email:</span>NEWLINE # <a class="email" href="mailto:joe@example.com">joe@example.com</a>NEWLINE # </div>NEWLINE def render_email(self, h, x):NEWLINE value = x.text.strip()NEWLINE cls = 'email'NEWLINE div = add.div(h, None,NEWLINE build.span("Email:"), '\n',NEWLINE build.a(value, href='mailto:%s'%value, classes=cls),NEWLINE classes=cls,NEWLINE )NEWLINE return divNEWLINENEWLINE # NEWLINE # 9.24. 
<eref>
    # 
    # This element is rendered as an HTML <a> element, with the "href"
    # attribute set to the value of the "target" attribute and the CSS
    # class of "eref".
    # 
    # <a href="https://..." class="eref">the text</a>
    def render_eref(self, h, x):
        """Render an <eref>; with link text use <a>, otherwise wrap a bare
        target URL link in a <span>."""
        target = x.get('target')
        if x.text:
            hh = add.a(h, x, href=target)
        else:
            hh = add.span(h, x, build.a(target, href=target))
        return hh

    # 9.25. <figure>
    # 
    # This element renders as the HTML <figure> element, containing the
    # artwork or sourcecode indicated and an HTML <figcaption> element.
    # The <figcaption> element will contain an <a> element around the
    # figure number.  It will also contain another <a> element with CSS
    # class "selfRef" around the figure name, if a name was given.
    # 
    # <figure id="f-1">
    #   ...
    #   <figcaption>
    #     <a href="#f-1">Figure 1.</a>
    #     <a href="#n-it-figures" id="n-it-figures" class="selfRef">
    #       It figures
    #     </a>
    #   </figcaption>
    # </figure>
    def render_figure(self, h, x):
        """Render a <figure>: its artset/artwork/sourcecode children plus a
        <figcaption> with the figure number and optional self-linked name."""
        name = x.find('name')
        if name != None and name.text:
            # Anchor target for the slugified figure name.
            add.span(h, None, id=name.get('slugifiedName'))
        figure = add.figure(h, x)
        for c in x.iterchildren('artset', 'artwork', 'sourcecode'):
            self.render(figure, c)
        pn = x.get('pn')
        caption = add.figcaption(figure, None)
        # Turn e.g. "figure-1" into "Figure 1" for the caption link text.
        a = add.a(caption, None, pn.replace('-',' ',1).title(), href='#%s'%pn)
        if name != None and name.text:
            a.tail = ':\n'
            a = add.a(caption, None, href='#%s'%name.get('slugifiedName'), classes='selfRef')
            self.inline_text_renderer(a, name)
        return figure

    # 9.26. <front>
    # 
    # See "Document Information" (Section 6.5) for information on this
    # element.
    def render_front(self, h, x):
        """Render a <front> element.

        In the 'front' part this emits the Document Information <dl>
        (stream/series/status/dates/authors) followed by title, abstract,
        notes and boilerplate; in the 'references' part it falls back to the
        default renderer.
        """
        if self.part == 'front':
            # 6.5. Document Information
            # 
            # Information about the document as a whole will appear as the first
            # child of the HTML <body> element, embedded in an HTML <dl> element
            # with id="identifiers".  The defined terms in the definition list are
            # "Workgroup:", "Series:", "Status:", "Published:", and "Author:" or
            # "Authors:" (as appropriate).  For example:
            # 
            # <dl id="identifiers">
            #   <dt>Workgroup:</dt>
            #     <dd class="workgroup">rfc-interest</dd>
            #   <dt>Series:</dt>
            #     <dd class="series">Internet-Draft</dd>
            #   <dt>Status:</dt>
            #     <dd class="status">Informational</dd>
            #   <dt>Published:</dt>
            #     <dd><time datetime="2014-10-25"
            #               class="published">2014-10-25</time></dd>
            #   <dt>Authors:</dt>
            #     <dd class="authors">
            #       <div class="author">
            #         <span class="initial">J.</span>
            #         <span class="surname">Hildebrand</span>
            #         (<span class="organization">Cisco Systems, Inc.</span>)
            #         <span class="editor">Ed.</span>
            #       </div>
            #       <div class="author">
            #         <span class="initial">H.</span>
            #         <span class="surname">Flanagan</span>
            #         (<span class="organization">RFC Editor</span>)
            #       </div>
            #     </dd>
            # </dl>
            # 

            # Now, a text format RFC has the following information, optionals in
            # parentheses:
            # 
            # If RFC:
            #   * <Stream Name>
            #   * Request for Comments: <Number>
            #   * (STD|BCP|FYI: <Number>)
            #   * (Obsoletes: <Number>[, <Number>]*)
            #   * (Updates: <Number>[, <Number>]*)
            #   * Category: <Category Name>
            #   * ISSN: 2070-1721
            # else:
            #   * <Workgroup name> or "Network Working Group"
            #   * Internet-Draft
            #   * (STD|BCP|FYI: <Number> (if approved))
            #   * (Obsoletes: <Number>[, <Number>]*)
            #   * (Updates: <Number>[, <Number>]*)
            #   * Intended Status: <Cagegory Name>
            #   * Expires: <Date>

            def entry(dl, name, value):
                # Append one dt/dd pair to the identifiers <dl>; skipped when
                # the value is None.
                if value != None:
                    cls = slugify(name)
                    dl.append( build.dt('%s:'%name, classes='label-%s'%cls))
                    dl.append( build.dd(value, classes=cls))
            #
            dl = build.dl(id='identifiers')
            h.append( build.div(dl, classes='document-information' ))
            if self.options.rfc:
                # Stream
                stream = self.root.get('submissionType')
                entry(dl, 'Stream', strings.stream_name[stream])
                # Series info
                for series in x.xpath('./seriesInfo'):
                    self.render_seriesinfo(dl, series)
                for section in ['obsoletes', 'updates']:
                    items = self.root.get(section)
                    if items:
                        alist = []
                        for num in items.split(','):
                            num = num.strip()
                            a = build.a(num, href=os.path.join(self.options.rfc_base_url, 'rfc%s.txt'%num), classes='eref')
                            a.tail = ' '
                            alist.append(a)
                        entry(dl, section.title(), *alist)

                category = self.root.get('category', '')
                if category:
                    entry(dl, 'Category', strings.category_name[category])
                # Publication date
                entry(dl, 'Published', self.render_date(None, x.find('date')))
                # ISSN
                entry(dl, 'ISSN', '2070-1721')

            else:
                # Workgroup
                for wg in x.xpath('./workgroup'):
                    entry(dl, 'Workgroup', wg.text)
                # Internet-Draft
                for series in x.xpath('./seriesInfo'):
                    entry(dl, series.get('name'), series.get('value'))
                # Obsoletes and Updates
                for section in ['obsoletes', 'updates']:
                    items = self.root.get(section)
                    if items:
                        for num in items.split(','):
                            num = num.strip()
                            a = build.a(num, href=os.path.join(self.options.rfc_base_url, 'rfc%s.txt'%num), classes='eref')
                            a.tail = ' '
                            entry(dl, section.title(), a)
                        # NOTE(review): placement reconstructed -- the
                        # "(if approved)" suffix is appended to the tail of the
                        # last rendered number; confirm against upstream.
                        a.tail += '(if approved)'
                # Publication date
                entry(dl, 'Published', self.render_date(None, x.find('date')))
                # Intended category
                category = self.root.get('category', '')
                if category:
                    entry(dl, 'Intended Status', strings.category_name[category])
                # Expiry date
                if self.root.get('ipr') != 'none':
                    exp = get_expiry_date(self.root, self.date)
                    expdate = build.date(year=str(exp.year), month=str(exp.month))
                    if exp.day:
                        expdate.set('day', str(exp.day))
                    entry(dl, 'Expires', self.render_date(None, expdate))

            authors = x.xpath('./author')
            dl.append( build.dt('Authors:' if len(authors)>1 else 'Author:', classes='label-authors' ))
            dd = add.dd(dl, None, classes='authors')
            for a in authors:
                self.render(dd, a)

            for c in x.iterchildren('title', 'abstract', 'note', 'boilerplate'):
                self.render(h, c)

        elif self.part == 'references':
            self.default_renderer(h, x)
        else:
            self.err(x, "Did not expect to be asked to render <%s> while in <%s> (self.part: %s)" % (x.tag, x.getparent().tag, self.part))


    # 9.27. <iref>
    # 
    # This element is rendered as an empty <> tag of class "iref", with an
    # "id" attribute consisting of the <iref> element's "irefid" attribute:
    # 
    # <span class="iref" id="s-Paragraphs-first-1"/>
    def render_iref(self, h, x):
        """Render an <iref> index anchor as an empty <span class="iref">;
        placed before a <table> parent, otherwise appended to h."""
        span = add.span(None, x, classes='iref', id=x.get('pn'))
        if h.tag in ['table', ]:
            # A span is not valid table content; hoist it in front of the table.
            h.addprevious(span)
        else:
            h.append(span)
        return span

    # 9.28. <keyword>
    # 
    # Each <keyword> element renders its text into the <meta> keywords in
    # the document's header, separated by commas.
    # 
    # <meta name="keywords" content="html,css,rfc">
    # 
    # 9.29. <li>
    # 
    # This element is rendered as its HTML counterpart. 
However, if thereNEWLINE # is no contained element that has a pilcrow (Section 5.2) attached, aNEWLINE # pilcrow is added.NEWLINE # NEWLINE # <li id="s-2-7">Item <a href="#s-2-7" class="pilcrow">&para;</a></li>NEWLINE def render_li_ul(self, h, x):NEWLINE li = add.li(h, x, classes=h.get('class'))NEWLINE for c in x.getchildren():NEWLINE self.render(li, c)NEWLINE self.maybe_add_pilcrow(li)NEWLINE return liNEWLINENEWLINE def render_li(self, h, x):NEWLINE if h.tag == 'ul':NEWLINE li = self.render_li_ul(h, x)NEWLINE elif h.tag == 'dl':NEWLINE li = self.render_li_dl(h, x)NEWLINE elif h.tag == 'ol':NEWLINE li = self.render_li_ol(h, x)NEWLINE else:NEWLINE self.err(x, "Did not expect to be asked to render <%s> while in <%s>" % (x.tag, h.tag))NEWLINE li = NoneNEWLINE return liNEWLINENEWLINE # 9.30. <link>NEWLINE # NEWLINE # This element is rendered as its HTML counterpart, in the HTML header.NEWLINE def render_link(self, h, x):NEWLINE link = add.link(h, x, rel=x.get('rel'), href=x.get('href'))NEWLINE return linkNEWLINE NEWLINE # 9.31. <middle>NEWLINE # NEWLINE # This element does not add any direct output to HTML.NEWLINE render_middle = skip_rendererNEWLINENEWLINE ## Potential extension: <math>NEWLINE ##NEWLINE ## Same content as for instance <name>, but may contain unicodeNEWLINE ## characters of categories L*, P*, Sm, Sk or Zs. For categories L*, the scriptNEWLINE ## must be either Common, Greek, or Hebrew.NEWLINE ##NEWLINE ## def render_math(self, s, x):NEWLINE ## for t in x.itertext():NEWLINE ## for c in t:NEWLINE ## cat = unicode.category(c)NEWLINE ## if cat.beginswith('L'):NEWLINE ## scr = get_script(c)NEWLINE ## if not scr in ['Common', 'Greek', 'Hebrew', ]:NEWLINE ## self.err(x, ...)NEWLINE ## div = add.div(h, x, classes="inline-math")NEWLINE ## for c in x.getchildren():NEWLINE ## self.render(div, c)NEWLINE ##NEWLINENEWLINE # 9.32. 
<name>NEWLINE # NEWLINE # This element is never rendered directly; it is only rendered whenNEWLINE # considering a parent element, such as <figure>, <references>,NEWLINE # <section>, or <table>.NEWLINE def render_name(self, s, x):NEWLINE p = x.getparent()NEWLINE if p.tag in [ 'note', 'section', 'references' ]:NEWLINE pn = p.get('pn')NEWLINE prefix, number = pn.split('-', 1)NEWLINE number += '.'NEWLINE if re.search(r'^[a-z]', number):NEWLINE num = number.split('.', 1)[1]NEWLINE else:NEWLINE num = numberNEWLINE level = min([6, len(num.split('.')) ])NEWLINE tag = 'h%d' % levelNEWLINE h = build(tag, id=x.get('slugifiedName'))NEWLINE s.append(h)NEWLINE #NEWLINE numbered = p.get('numbered') or ('true' if p.tag == 'references' else 'false')NEWLINE if numbered == 'true':NEWLINE if number.startswith('appendix'):NEWLINE number = number.replace('.', ' ', 1).title()NEWLINE elif re.search('^[a-z]', number):NEWLINE number = number.title()NEWLINE a_number = build.a(number, '\u00a0', href='#%s'%pn, classes='section-number selfRef')NEWLINE h.append( a_number)NEWLINE a_title = build.a(href='#%s'%x.get('slugifiedName'), classes='section-name selfRef')NEWLINE self.inline_text_renderer(a_title, x)NEWLINE h.append(a_title)NEWLINE return hNEWLINE elif p.tag in [ 'table', 'figure' ]:NEWLINE return NoneNEWLINE else:NEWLINE self.warn(x, "Did not expect to be asked to render <%s> while in <%s>" % (x.tag, x.getparent().tag))NEWLINE self.default_renderer(s, x)NEWLINENEWLINENEWLINE # 9.33. <note>NEWLINE # NEWLINE # This element is rendered like a <section> element, but without aNEWLINE # section number and with the CSS class of "note". 
If theNEWLINE # "removeInRFC" attribute is set to "yes", the generated <div> elementNEWLINE # will also include the CSS class "rfcEditorRemove".NEWLINE # NEWLINE # <section id="s-note-1" class="note rfcEditorRemove">NEWLINE # <h2>NEWLINE # <a href="#n-editorial-note" class="selfRef">Editorial Note</a>NEWLINE # </h2>NEWLINE # <p id="s-note-1-1">NEWLINE # Discussion of this draft takes place...NEWLINE # <a href="#s-note-1-1" class="pilcrow">&para;</a>NEWLINE # </p>NEWLINE # </section>NEWLINE def render_note(self, h, x):NEWLINE classes = 'note'NEWLINE if x.get('removeInRFC') == 'true':NEWLINE classes += ' rfcEditorRemove'NEWLINE section = add.section(h, x, classes=classes)NEWLINE for c in x.getchildren():NEWLINE self.render(section, c)NEWLINE return sectionNEWLINENEWLINE # 9.34. <ol>NEWLINE # NEWLINE # The output created from an <ol> element depends upon the "style"NEWLINE # attribute.NEWLINE # NEWLINE # If the "spacing" attribute has the value "compact", a CSS class ofNEWLINE # "olCompact" will be added.NEWLINE # NEWLINE # The group attribute is not copied; the input XML should have startNEWLINE # values added by a prep tool for all grouped <ol> elements.NEWLINE def render_ol(self, h, x):NEWLINE type = x.get('type')NEWLINE if len(type) > 1 and '%' in type:NEWLINE ol = add.dl(h, x, classes='olPercent')NEWLINE else:NEWLINE attrib = dict([ (k,v) for (k,v) in x.attrib.items() if k in ['start', 'type', ] ])NEWLINE ol = add.ol(h, x, classes=x.get('spacing'), **attrib)NEWLINE for c in x.getchildren():NEWLINE self.render(ol, c)NEWLINE return olNEWLINENEWLINE # 9.34.1. Percent StylesNEWLINE # NEWLINE # If the style attribute includes the character "%", the output is aNEWLINE # <dl> tag with the class "olPercent". 
Each contained <li> element isNEWLINE # emitted as a <dt>/<dd> pair, with the generated label in the <dt> andNEWLINE # the contents of the <li> in the <dd>.NEWLINE # NEWLINE # <dl class="olPercent">NEWLINE # <dt>Requirement xviii:</dt>NEWLINE # <dd>Wheels on a big rig</dd>NEWLINE # </dl>NEWLINE def render_li_dl(self, h, x):NEWLINE label = x.get('derivedCounter')NEWLINE dt = add.dt(h, None, label)NEWLINE dd = add.dd(h, x)NEWLINE for c in x.getchildren():NEWLINE self.render(dd, c)NEWLINE self.maybe_add_pilcrow(dd)NEWLINE return dt, ddNEWLINENEWLINE # 9.34.2. Standard StylesNEWLINE # NEWLINE # For all other styles, an <ol> tag is emitted, with any "style"NEWLINE # attribute turned into the equivalent HTML attribute.NEWLINE # NEWLINE # <ol class="compact" type="I" start="18">NEWLINE # <li>Wheels on a big rig</li>NEWLINE # </ol>NEWLINE def render_li_ol(self, h, x):NEWLINE li = add.li(h, x)NEWLINE for c in x.getchildren():NEWLINE self.render(li, c)NEWLINE self.maybe_add_pilcrow(li)NEWLINE return liNEWLINENEWLINE # 9.35. <organization>NEWLINE # NEWLINE # This element is rendered as an HTML <div> tag with CSS class "org".NEWLINE # NEWLINE # If the element contains the "ascii" attribute, the organization nameNEWLINE # is rendered twice: once with the non-ASCII version wrapped in an HTMLNEWLINE # <span> tag of class "non-ascii" and then as the ASCII version wrappedNEWLINE # in an HTML <span> tag of class "ascii" wrapped in parentheses.NEWLINE # NEWLINE # <div class="org">NEWLINE # <span class="non-ascii">Test Org</span>NEWLINE # (<span class="ascii">TEST ORG</span>)NEWLINE # </div>NEWLINE render_organization = null_renderer # handled in render_addressNEWLINENEWLINE # 9.36. 
<phone>NEWLINE # NEWLINE # This element is rendered as an HTML <div> tag containing the stringNEWLINE # "Phone:" (wrapped in a span), an HTML <a> tag with CSS class "tel"NEWLINE # containing the phone number (and an href with a corresponding "tel:"NEWLINE # URI), and an HTML <span> with CSS class "type" containing the stringNEWLINE # "VOICE".NEWLINE # NEWLINE # <div>NEWLINE # <span>Phone:</span>NEWLINE # <a class="tel" href="tel:+1-720-555-1212">+1-720-555-1212</a>NEWLINE # <span class="type">VOICE</span>NEWLINE # </div>NEWLINE def render_phone(self, h, x):NEWLINE # The content of <span class="type">VOICE</span> seems to violate theNEWLINE # vcard types (they identify things like 'Home', 'Work', etc) andNEWLINE # will be skipped. The NEWLINE if not x.text:NEWLINE return NoneNEWLINE value = x.text.strip()NEWLINE cls = 'tel'NEWLINE div = add.div(h, None,NEWLINE build.span("Phone:"), '\n',NEWLINE build.a(value, href='tel:%s'%value, classes=cls),NEWLINE classes=cls,NEWLINE )NEWLINE return divNEWLINENEWLINE # 9.37. 
<postal>NEWLINE # NEWLINE # This element renders as an HTML <div> with CSS class "adr", unless itNEWLINE # contains one or more <postalLine> child elements; in which case, itNEWLINE # renders as an HTML <pre> element with CSS class "label".NEWLINE # NEWLINE # When there is no <postalLine> child, the following child elements areNEWLINE # rendered into the HTML:NEWLINE # NEWLINE # o Each <street> is renderedNEWLINE # NEWLINE # o A <div> that includes:NEWLINE # NEWLINE # * The rendering of all <city> elementsNEWLINE # NEWLINE # * A comma and a space: ", "NEWLINE # NEWLINE # * The rendering of all <region> elementsNEWLINE # NEWLINE # * WhitespaceNEWLINE # NEWLINE # * The rendering of all <code> elementsNEWLINE # NEWLINE # o The rendering of all <country> elementsNEWLINE # NEWLINE # <div class="adr">NEWLINE # <div class="street-address">1 Main Street</div>NEWLINE # <div class="street-address">Suite 1</div>NEWLINE # <div>NEWLINE # <span class="city">Denver</span>,NEWLINE # <span class="region">CO</span>NEWLINE # <span class="postal-code">80212</span>NEWLINE # </div>NEWLINE # <div class="country-name">United States of America</div>NEWLINE # </div>NEWLINE ##NEWLINE ## Much of the description above is much too americentric, and alsoNEWLINE ## conflicts with hCard. 
Examples from hCard will be used instead,
    ## and addresses rendered in a format appropriate for their country.
    ## 
    ## <span class="adr">
    ##   <span class="street-address">12 rue Danton</span>
    ##   <span class="postal-code">94270</span>
    ##   <span class="locality">Le Kremlin-Bicetre</span>
    ##   <span class="country-name">France</span>
    ## </span> 
    def render_postal(self, h, x):
        # Render a <postal> address.  When the address normalizes, emit it
        # in a country-appropriate format with bidi alignment for non-latin
        # scripts; otherwise fall back to rendering children in found order.
        # NOTE(review): the 'ascii' class on the parent selects the latin
        # rendering -- confirm against the callers that set that class.
        latin = h.get('class') == 'ascii'
        adr = get_normalized_address_info(self, x, latin=latin)
        if adr:
            align = 'left' if latin else get_bidi_alignment(adr)
            for item in format_address(adr, latin=latin):
                item.set('class', align)
                h.append(item)
        else:
            # render elements in found order
            for c in x.getchildren():
                self.render(h, c)

    # 9.38.  <postalLine>
    # 
    # This element renders as the text contained by the element, followed
    # by a newline.  However, the last <postalLine> in a given <postal>
    # element should not be followed by a newline.  For example:
    # 
    # <postal>
    #   <postalLine>In care of:</postalLine>
    #   <postalLine>Computer Sciences Division</postalLine>
    # </postal>
    # 
    # Would be rendered as:
    # 
    # <pre class="label">In care of:
    # Computer Sciences Division</pre>
    def render_postalline(self, h, x):
        # Rendered through the common address-line helper, with the
        # 'extended-address' vcard class.
        return self.address_line_renderer(h, x, classes='extended-address')

    # 9.39.  <refcontent>
    # 
    # This element renders as an HTML <span> with CSS class "refContent".
    # 
    # <span class="refContent">Self-published pamphlet</span>
    def render_refcontent(self, h, x):
        # Simple container: a classed <span> holding the rendered children.
        span = add.span(h, x, classes='refContent')
        for c in x.getchildren():
            self.render(span, c)
        return span


    # 9.40. 
<reference>NEWLINE # NEWLINE # If the parent of this element is not a <referencegroup>, this elementNEWLINE # will render as a <dt> <dd> pair with the defined term being theNEWLINE # reference "anchor" attribute surrounded by square brackets and theNEWLINE # definition including the correct set of bibliographic information asNEWLINE # specified by [RFC7322]. The <dt> element will have an "id" attributeNEWLINE # of the reference anchor.NEWLINE # NEWLINE # <dl class="reference">NEWLINE # <dt id="RFC5646">[RFC5646]</dt>NEWLINE # <dd>NEWLINE # <span class="refAuthor">Phillips, A.</span>NEWLINE # <span>and</span>NEWLINE # <span class="refAuthor">M. Davis</span>NEWLINE # <span class="refTitle">"Tags for Identifying Languages"</span>,NEWLINE # ...NEWLINE # </dd>NEWLINE # </dl>NEWLINE # NEWLINE # If the child of a <referencegroup>, this element renders as a <div>NEWLINE # of class "refInstance" whose "id" attribute is the value of theNEWLINE # <source> element's "anchor" attribute.NEWLINE # NEWLINE # <div class="refInstance" id="RFC5730">NEWLINE # ...NEWLINE # </div>NEWLINE def render_reference(self, h, x):NEWLINE p = x.getparent()NEWLINE if p.tag == 'referencegroup':NEWLINE div = add.div(h, x, classes='refInstance')NEWLINE outer = divNEWLINE inner = divNEWLINE elif p.tag != 'referencegroup':NEWLINE dt = add.dt(h, x, '[%s]'%x.get('derivedAnchor'))NEWLINE dd = add.dd(h, None)NEWLINE outer = dt, ddNEWLINE inner = ddNEWLINE else:NEWLINE self.err(x, "Did not expect to be asked to render <%s> while in <%s>" % (x.tag, x.getparent().tag))NEWLINE # Deal with parts in the correct orderNEWLINE for c in x.iterdescendants('author'):NEWLINE self.render(inner, c)NEWLINE for ctag in ('title', 'refcontent', 'stream', 'seriesInfo', 'date', ):NEWLINE for c in x.iterdescendants(ctag):NEWLINE if len(inner):NEWLINE inner[-1].tail = ', 'NEWLINE self.render(inner, c)NEWLINE if p.tag != 'referencegroup':NEWLINE target = x.get('target')NEWLINE if target:NEWLINE inner.append( build.span(', <', 
build.a(target, href=target), '>') )NEWLINE if len(inner):NEWLINE inner[-1].tail = '. 'NEWLINE for ctag in ('annotation', ):NEWLINE for c in x.iterdescendants(ctag):NEWLINE self.render(inner, c)NEWLINE #NEWLINE return outerNEWLINENEWLINE # 9.41. <referencegroup>NEWLINE # NEWLINE # A <referencegroup> is translated into a <dt> <dd> pair, with theNEWLINE # defined term being the referencegroup "anchor" attribute surroundedNEWLINE # by square brackets, and the definition containing the translatedNEWLINE # output of all of the child <reference> elements.NEWLINE # NEWLINE # <dt id="STD69">[STD69]</dt>NEWLINE # <dd>NEWLINE # <div class="refInstance" id="RFC5730">NEWLINE # <span class="refAuthor">Hollenbeck, S.</span>NEWLINE # ...NEWLINE # </div>NEWLINE # <div class="refInstance" id="RFC5731">NEWLINE # <span class="refAuthor">Hollenbeck, S.</span>NEWLINE # ...NEWLINE # </div>NEWLINE # ...NEWLINE # </dd>NEWLINE def render_referencegroup(self, h, x):NEWLINE dt = add.dt(h, x, '[%s]'%x.get('derivedAnchor'))NEWLINE dd = add.dd(h, None)NEWLINE for c in x.getchildren():NEWLINE self.render(dd, c)NEWLINE target = x.get('target')NEWLINE if target:NEWLINE dd.append( build.span('<', build.a(target, href=target), '>') )NEWLINE return dt, ddNEWLINENEWLINE # 9.42. <references>NEWLINE # NEWLINE # If there is at exactly one <references> element, a section is addedNEWLINE # to the document, continuing with the next section number after theNEWLINE # last top-level <section> in <middle>. The <name> element of theNEWLINE # <references> element is used as the section name.NEWLINE # NEWLINE # <section id="n-my-references">NEWLINE # <h2 id="s-3">NEWLINE # <a href="#s-3" class="selfRef">3.</a>NEWLINE # <a href="#n-my-references class="selfRef">My References</a>NEWLINE # </h2>NEWLINE # ...NEWLINE # </section>NEWLINE # NEWLINE # If there is more than one <references> element, an HTML <section>NEWLINE # element is created to contain a subsection for each of theNEWLINE # <references>. 
The section number
    # after the last top-level <section> in <middle>.  The name of this
    # section will be "References", and its "id" attribute will be
    # "n-references".
    # 
    # <section id="n-references">
    #   <h2 id="s-3">
    #     <a href="#s-3" class="selfRef">3.</a>
    #     <a href="#n-references" class="selfRef">References</a>
    #   </h2>
    #   <section id="n-informative-references">
    #     <h3 id="s-3.1">
    #       <a href="#s-3.1" class="selfRef">3.1.</a>
    #       <a href="#n-informative-references" class="selfRef">
    #         Informative References</a></h3>
    #     <dl class="reference">...
    #     </dl>
    #   </section>
    #   ...
    # </section>
    def render_references(self, h, x):
        # Render a <references> element as a <section>.  A shared
        # <dl class="references"> is created lazily when the first
        # <reference>/<referencegroup> child is encountered; later children
        # render into whichever container is then current.
        # Setting self.part lets nested renderers (e.g. render_seriesinfo)
        # know they are inside the references.
        self.part = x.tag
        section = add.section(h, x)
        hh = section
        for c in x.getchildren():
            if c.tag in ['reference', 'referencegroup'] and hh.tag != 'dl':
                hh = add.dl(hh, None, classes='references')
            self.render(hh, c)
        return section

    # 
    # 9.43.  <region>
    # 
    # This element is rendered as a <span> tag with CSS class "region".
    # 
    # <span class="region">Colorado</span>
    def render_region(self, h, x):
        # Rendered through the common address-line helper.
        return self.address_line_renderer(h, x, classes='region')

    # 
    # 9.44.  <relref>
    # 
    # This element is rendered as an HTML <a> tag with CSS class "relref"
    # and "href" attribute of the "derivedLink" attribute of the element.
    # Different values of the "displayFormat" attribute cause the text
    # inside that HTML <a> tag to change and cause extra text to be
    # generated. 
Some values of the "displayFormat" attribute also causeNEWLINE # another HTML <a> tag to be rendered with CSS class "xref" and anNEWLINE # "href" of "#" and the "target" attribute (modified by any applicableNEWLINE # <displayreference> XML element) and text inside of the "target"NEWLINE # attribute (modified by any applicable <displayreference> XMLNEWLINE # element). When used, this <a class='xref'> HTML tag is alwaysNEWLINE # surrounded by square brackets, for example, "[<a class='xref'NEWLINE # href='#foo'>foo</a>]".NEWLINENEWLINE ## Deprecated, removed by preptoolNEWLINENEWLINENEWLINE # 9.44.1. displayFormat='of'NEWLINE # NEWLINE # The output is an <a class='relref'> HTML tag, with contents ofNEWLINE # "Section " and the value of the "section" attribute. This isNEWLINE # followed by the word "of" (surrounded by whitespace). This isNEWLINE # followed by the <a class='xref'> HTML tag (surrounded by squareNEWLINE # brackets).NEWLINE # NEWLINE # For example, with an input of:NEWLINE # NEWLINE # See <relref section="2.3" target="RFC9999" displayFormat="of"NEWLINE # derivedLink="http://www.rfc-editor.org/info/rfc9999#s-2.3"/>NEWLINE # for an overview.NEWLINE # NEWLINE # The HTML generated will be:NEWLINE # NEWLINE # See <a class="relref"NEWLINE # href="http://www.rfc-editor.org/info/rfc9999#s-2.3">SectionNEWLINE # 2.3</a> of [<a class="xref" href="#RFC9999">RFC9999</a>]NEWLINE # for an overview.NEWLINE # NEWLINE # 9.44.2. 
displayFormat='comma'NEWLINE # NEWLINE # The output is an <a class='xref'> HTML tag (wrapped by squareNEWLINE # brackets), followed by a comma (","), followed by whitespace,NEWLINE # followed by an <a class='relref'> HTML tag, with contents ofNEWLINE # "Section " and the value of the "section" attribute.NEWLINE # NEWLINE # For example, with an input of:NEWLINE # NEWLINE # See <relref section="2.3" target="RFC9999" displayFormat="comma"NEWLINE # derivedLink="http://www.rfc-editor.org/info/rfc9999#s-2.3"/>,NEWLINE # for an overview.NEWLINE # NEWLINE # The HTML generated will be:NEWLINE # NEWLINE # See [<a class="xref" href="#RFC9999">RFC9999</a>], <a class="relref"NEWLINE # href="http://www.rfc-editor.org/info/rfc9999#s-2.3">Section 2.3</a>,NEWLINE # for an overview.NEWLINE # NEWLINE # 9.44.3. displayFormat='parens'NEWLINE # NEWLINE # The output is an <a> element with "href" attribute whose value is theNEWLINE # value of the "target" attribute prepended by "#", and whose contentNEWLINE # is the value of the "target" attribute; the entire element is wrappedNEWLINE # in square brackets. This is followed by whitespace. 
This isNEWLINE # followed by an <a> element whose "href" attribute is the value of theNEWLINE # "derivedLink" attribute and whose content is the value of theNEWLINE # "derivedRemoteContent" attribute; the entire element is wrapped inNEWLINE # parentheses.NEWLINE # NEWLINE # For example, if Section 2.3 of RFC 9999 has the title "ProtocolNEWLINE # Overview", for an input of:NEWLINE # NEWLINE # See <relref section="2.3" target="RFC9999" displayFormat="parens"NEWLINE # derivedLink="http://www.rfc-editor.org/info/rfc9999#s-2.3"NEWLINE # derivedRemoteContent="Section 2.3"/> for an overview.NEWLINE # NEWLINE # The HTML generated will be:NEWLINE # NEWLINE # See [<a class="relref" href="#RFC9999">RFC9999</a>]NEWLINE # (<a class="relref"NEWLINE # href="http://www.rfc-editor.org/info/rfc9999#s-2.3">SectionNEWLINE # 2.3</a>) for an overview.NEWLINE # NEWLINE # 9.44.4. displayFormat='bare'NEWLINE # NEWLINE # The output is an <a> element whose "href" attribute is the value ofNEWLINE # the "derivedLink" attribute and whose content is the value of theNEWLINE # "derivedRemoteContent" attribute.NEWLINE # NEWLINE # For this input:NEWLINE # NEWLINE # See <relref section="2.3" target="RFC9999" displayFormat="bare"NEWLINE # derivedLink="http://www.rfc-editor.org/info/rfc9999#s-2.3"NEWLINE # derivedRemoteContent="Section 2.3"/> and ...NEWLINE # NEWLINE # The HTML generated will be:NEWLINE # NEWLINE # See <a class="relref"NEWLINE # href="http://www.rfc-editor.org/info/rfc9999#s-2.3">SectionNEWLINE # 2.3</a> and ...NEWLINENEWLINE # 9.45. <rfc>NEWLINE # NEWLINE # Various attributes of this element are represented in different partsNEWLINE # of the HTML document.NEWLINENEWLINE # 9.46. <section>NEWLINE # NEWLINE # This element is rendered as an HTML <section> element, containing anNEWLINE # appropriate level HTML heading element (<h2>-<h6>). 
That headingNEWLINE # element contains an <a> element around the part number (pn), ifNEWLINE # applicable (for instance, <abstract> does not get a section number).NEWLINE # Another <a> element is included with the section's name.NEWLINE # NEWLINE # <section id="intro">NEWLINE # <h2 id="s-1">NEWLINE # <a href="#s-1" class="selfRef">1.</a>NEWLINE # <a href="#intro" class="selfRef">Introduction</a>NEWLINE # </h2>NEWLINE # <p id="s-1-1">Paragraph <a href="#s-1-1" class="pilcrow">&para;</a>NEWLINE # </p>NEWLINE # </section>NEWLINE def render_section(self, h, x):NEWLINE section = add(x.tag, h, x)NEWLINE anchor = x.get('anchor')NEWLINE if anchor == 'toc':NEWLINE add.a(section, None, "\u25b2", href="#", onclick="scroll(0,0)", classes="toplink")NEWLINE for c in x.getchildren():NEWLINE self.render(section, c)NEWLINE return sectionNEWLINENEWLINENEWLINE # 9.47. <seriesInfo>NEWLINE # NEWLINE # This element is rendered in an HTML <span> element with CSS nameNEWLINE # "seriesInfo".NEWLINE # NEWLINE # <span class="seriesInfo">RFC 5646</span>NEWLINE ## This is different from what's shown in the sample documents, _and_NEWLINE ## different from what's shown in Section 6.5. 
Following the sample doc
    ## here.
    def render_seriesinfo(self, h, x):
        # Render a <seriesInfo> element.  In the front matter the entry is
        # added to the document-information <dl>, with RFC / DOI / I-D
        # values linked to their canonical URLs; in the references it
        # becomes a <span class="seriesInfo">.  Anywhere else is an error.
        def entry(dl, name, value):
            # Append one "<dt>name:</dt><dd>value</dd>" pair to dl.
            cls = slugify(name)
            dl.append( build.dt('%s:'%name, classes='label-%s'%cls))
            dl.append( build.dd(value, classes=cls))
        #
        name = x.get('name')
        value = x.get('value')
        if self.part == 'front':
            if name == 'RFC':
                value = build.a(value, href=os.path.join(self.options.rfc_base_url, 'rfc%s.txt'%value), classes='eref')
            elif name == 'DOI':
                value = build.a(value, href=os.path.join(self.options.doi_base_url, value), classes='eref')
            elif name == 'Internet-Draft':
                value = build.a(value, href=os.path.join(self.options.id_base_url, value), classes='eref')
            entry(h, name, value)
            return h
        elif self.part == 'references':
            span = add.span(h, x, name, ' ', value, classes='seriesInfo')
            return span
        else:
            self.err(x, "Did not expect to be asked to render <%s> while in <%s>" % (x.tag, x.getparent().tag))


    # 9.48.  <sourcecode>
    # 
    # This element is rendered in an HTML <pre> element with a CSS class of
    # "sourcecode".  Note that CDATA blocks do not work consistently in
    # HTML, so all <, >, and & must be escaped as &lt;, &gt;, and &amp;,
    # respectively.  If the input XML has a "type" attribute, another CSS
    # class of "lang-" and the type is added.
    # 
    # If the sourcecode is not inside a <figure> element, a pilcrow
    # (Section 5.2) is included. 
Inside a <figure> element, the figureNEWLINE # title serves the purpose of the pilcrow.NEWLINE # NEWLINE # <pre class="sourcecode lang-c">NEWLINE # #include &lt;stdio.h&gt;NEWLINE # NEWLINE # int main(void)NEWLINE # {NEWLINE # printf(&quot;hello, world\n&quot;);NEWLINE # return 0;NEWLINE # }NEWLINE # </pre>NEWLINE def render_sourcecode(self, h, x):NEWLINE file = x.get('name')NEWLINE type = x.get('type')NEWLINE mark = x.get('markers') == 'true'NEWLINE classes = 'sourcecode'NEWLINE if type:NEWLINE classes += ' lang-%s' % typeNEWLINE div = add.div(h, x)NEWLINE div.text = NoneNEWLINE pre = add.pre(div, None, x.text, classes=classes)NEWLINE if mark:NEWLINE text = pre.textNEWLINE text = (' file "%s"\n%s' % (file, text)) if text else '\n%s' % textNEWLINE text = "<CODE BEGINS>%s\n<CODE ENDS>" % textNEWLINE pre.text = textNEWLINE self.maybe_add_pilcrow(div)NEWLINE return divNEWLINENEWLINENEWLINE def render_stream(self, h, x):NEWLINE return add.span(h, x, classes="stream")NEWLINENEWLINE # 9.49. <street>NEWLINE # NEWLINE # This element renders as an HTML <div> element with CSS class "street-NEWLINE # address".NEWLINE # NEWLINE # <div class="street-address">1899 Wynkoop St, Suite 600</div>NEWLINE def render_street(self, h, x):NEWLINE return self.address_line_renderer(h, x, classes='street-address')NEWLINENEWLINE def render_extaddr(self, h, x):NEWLINE return self.address_line_renderer(h, x, classes='extended-address')NEWLINENEWLINE def render_pobox(self, h, x):NEWLINE return self.address_line_renderer(h, x, classes='post-office-box')NEWLINENEWLINENEWLINE # 9.50. <strong>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_strong = default_rendererNEWLINENEWLINE # 9.51. <sub>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_sub = default_rendererNEWLINENEWLINE # 9.52. 
<sup>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_sup = default_rendererNEWLINENEWLINE # 9.53. <t>NEWLINE # NEWLINE # This element is rendered as an HTML <p> element. A pilcrowNEWLINE # (Section 5.2) is included.NEWLINE # NEWLINE # <p id="s-1-1">A paragraph.NEWLINE # <a href="#s-1-1" class="pilcrow">&para;</a></p>NEWLINE def render_t(self, h, x):NEWLINE p = add.p(h, x)NEWLINE for c in x.getchildren():NEWLINE self.render(p, c)NEWLINE add.a(p, None, pilcrow, classes='pilcrow', href='#%s'%x.get('pn'))NEWLINE return pNEWLINENEWLINE # NEWLINE # 9.54. <table>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE def render_table(self, h, x):NEWLINE name = x.find('name')NEWLINE if name != None and name.text:NEWLINE add.span(h, None, id=name.get('slugifiedName'))NEWLINE align = x.get('align')NEWLINE table = add.table(h, x, classes=align)NEWLINE caption = add.caption(table, None)NEWLINE pn = x.get('pn')NEWLINE a = add.a(caption, None, pn.replace('-',' ',1).title(), href='#%s'%pn)NEWLINE if name != None:NEWLINE a.tail = ':\n'NEWLINE a = add.a(caption, None, href='#%s'%name.get('slugifiedName'), classes='selfRef')NEWLINE self.inline_text_renderer(a, name)NEWLINE for c in x.getchildren():NEWLINE self.render(table, c)NEWLINE return tableNEWLINENEWLINE # 9.55. <tbody>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_tbody = default_rendererNEWLINENEWLINE # 9.56. <td>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE def render_td(self, h, x):NEWLINE classes = "text-%s" % x.get('align')NEWLINE hh = add(x.tag, h, x, classes=classes)NEWLINE hh.set('rowspan', x.get('rowspan', '1'))NEWLINE hh.set('colspan', x.get('colspan', '1'))NEWLINE for c in x.getchildren():NEWLINE self.render(hh, c)NEWLINENEWLINE # 9.57. 
<tfoot>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_tfoot = default_rendererNEWLINENEWLINE # 9.58. <th>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE def render_th(self, h, x):NEWLINE classes = "text-%s" % x.get('align')NEWLINE hh = add(x.tag, h, x, classes=classes)NEWLINE hh.set('rowspan', x.get('rowspan', '1'))NEWLINE hh.set('colspan', x.get('colspan', '1'))NEWLINE for c in x.getchildren():NEWLINE self.render(hh, c)NEWLINENEWLINE # 9.59. <thead>NEWLINE #NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_thead = default_rendererNEWLINE NEWLINE # 9.60. <title>NEWLINE # NEWLINE # The title of the document appears in a <title> element in the <head>NEWLINE # element, as described in Section 6.3.2.NEWLINE # NEWLINE # The title also appears in an <h1> element and follows directly afterNEWLINE # the Document Information. The <h1> element has an "id" attributeNEWLINE # with value "title".NEWLINE # NEWLINE # <h1 id="title">HyperText Markup Language Request ForNEWLINE # Comments Format</h1>NEWLINE # NEWLINE # Inside a reference, the title is rendered as an HTML <span> tag withNEWLINE # CSS class "refTitle". The text is surrounded by quotes inside theNEWLINE # <span>.NEWLINE # NEWLINE # <span class="refTitle">"Tags for Identifying Languages"</span>NEWLINE def render_title(self, h, x):NEWLINE pp = x.getparent().getparent()NEWLINE title = x.textNEWLINE if pp.get("quoteTitle") == 'true':NEWLINE title = '"%s"' % titleNEWLINE ascii = x.get('ascii')NEWLINE if ascii and not is_script(title, 'Latin'):NEWLINE if pp.get("quoteTitle") == 'true':NEWLINE ascii = '"%s"' % asciiNEWLINE #NEWLINE if self.part == 'references':NEWLINE if title:NEWLINE span = wrap_ascii('span', '', title, ascii, '', classes='refTitle')NEWLINE h.append(span)NEWLINE return spanNEWLINE else:NEWLINE h1 = build.h1(title, id='title')NEWLINE h.append(h1)NEWLINE return h1NEWLINENEWLINE # 9.61. 
<tr>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart.NEWLINE render_tr = default_rendererNEWLINENEWLINE # 9.62. <tt>NEWLINE # NEWLINE # This element is rendered as an HTML <code> element.NEWLINE def render_tt(self, h, x):NEWLINE hh = add.code(h, x)NEWLINE for c in x.getchildren():NEWLINE self.render(hh, c)NEWLINENEWLINE # 9.63. <ul>NEWLINE # NEWLINE # This element is directly rendered as its HTML counterpart. If theNEWLINE # "spacing" attribute has the value "compact", a CSS class ofNEWLINE # "ulCompact" will be added. If the "empty" attribute has the valueNEWLINE # "true", a CSS class of "ulEmpty" will be added.NEWLINE def render_ul(self, h, x):NEWLINE ul = build.ul()NEWLINE p = x.getparent()NEWLINE panchor = p.get('anchor')NEWLINE classes = h.get('class', '')NEWLINE if panchor in ['toc', ]:NEWLINE hh = wrap(ul, 'nav', **{'class': panchor})NEWLINE classes += ' '+panchor if classes else panchorNEWLINE else:NEWLINE hh = ulNEWLINE h.append(hh)NEWLINE if x.get('empty')=='true':NEWLINE if not 'ulEmpty' in classes:NEWLINE if classes:NEWLINE classes += ' 'NEWLINE classes += 'ulEmpty' NEWLINE if classes:NEWLINE ul.set('class', classes)NEWLINE for c in x.getchildren():NEWLINE self.render(ul, c)NEWLINE return ulNEWLINENEWLINE # RFC 7997NEWLINE # 3.4. 
Body of the DocumentNEWLINE # NEWLINE # When the mention of non-ASCII characters is required for correctNEWLINE # protocol operation and understanding, the characters' UnicodeNEWLINE # character name or code point MUST be included in the text.NEWLINE # NEWLINE # o Non-ASCII characters will require identifying the Unicode codeNEWLINE # point.NEWLINE # NEWLINE # o Use of the actual UTF-8 character (e.g., Δ) is encouraged soNEWLINE # that a reader can more easily see what the character is, if theirNEWLINE # device can render the text.NEWLINE # NEWLINE # o The use of the Unicode character names like "INCREMENT" inNEWLINE # addition to the use of Unicode code points is also encouraged.NEWLINE # When used, Unicode character names should be in all capitalNEWLINE # letters.NEWLINE # NEWLINE # Examples:NEWLINE # NEWLINE # OLD [RFC7564]:NEWLINE # NEWLINE # However, the problem is made more serious by introducing the fullNEWLINE # range of Unicode code points into protocol strings. For example,NEWLINE # the characters U+13DA U+13A2 U+13B5 U+13AC U+13A2 U+13AC U+13D2 fromNEWLINE # the Cherokee block look similar to the ASCII characters "STPETER" asNEWLINE # they might appear when presented using a "creative" font family.NEWLINE # NEWLINE # NEW/ALLOWED:NEWLINE # NEWLINE # However, the problem is made more serious by introducing the fullNEWLINE # range of Unicode code points into protocol strings. For example,NEWLINE # the characters U+13DA U+13A2 U+13B5 U+13AC U+13A2 U+13AC U+13D2NEWLINE # (ᏚᎢᎵᎬᎢᎬᏒ) from the Cherokee block look similar to the ASCIINEWLINE # characters "STPETER" as they might appear when presented using aNEWLINE # "creative" font family.NEWLINE # NEWLINE # ALSO ACCEPTABLE:NEWLINE # NEWLINE # However, the problem is made more serious by introducing the fullNEWLINE # range of Unicode code points into protocol strings. 
For example,NEWLINE # the characters "ᏚᎢᎵᎬᎢᎬᏒ" (U+13DA U+13A2 U+13B5 U+13AC U+13A2NEWLINE # U+13AC U+13D2) from the Cherokee block look similar to the ASCIINEWLINE # characters "STPETER" as they might appear when presented using aNEWLINE # "creative" font family.NEWLINE # NEWLINE # Example of proper identification of Unicode characters in an RFC:NEWLINE # NEWLINE # Flanagan Expires October 27, 2016 [Page 6]NEWLINE # NEWLINE # NEWLINE # Internet-Draft non-ASCII in RFCs April 2016NEWLINE # NEWLINE # NEWLINE # Acceptable:NEWLINE # NEWLINE # o Temperature changes in the Temperature Control Protocol areNEWLINE # indicated by the U+2206 character.NEWLINE # NEWLINE # Preferred:NEWLINE # NEWLINE # 1. Temperature changes in the Temperature Control Protocol areNEWLINE # indicated by the U+2206 character ("Δ").NEWLINE # NEWLINE # 2. Temperature changes in the Temperature Control Protocol areNEWLINE # indicated by the U+2206 character (INCREMENT).NEWLINE # NEWLINE # 3. Temperature changes in the Temperature Control Protocol areNEWLINE # indicated by the U+2206 character ("Δ", INCREMENT).NEWLINE # NEWLINE # 4. Temperature changes in the Temperature Control Protocol areNEWLINE # indicated by the U+2206 character (INCREMENT, "Δ").NEWLINE # NEWLINE # 5. Temperature changes in the Temperature Control Protocol areNEWLINE # indicated by the "Delta" character "Δ" (U+2206).NEWLINE # NEWLINE # 6. 
Temperature changes in the Temperature Control Protocol are
    #       indicated by the character "Δ" (INCREMENT, U+2206).
    def render_u(self, h, x):
        # Render a <u> (unicode) element, normally as the expanded form
        # produced by expand_unicode_element(); when some <xref> targets
        # this element's anchor, only the literal text is shown instead.
        try:
            text = expand_unicode_element(x)
        except (RuntimeError, ValueError) as e:
            self.err(x, e)
            text = ''
        anchor = x.get('anchor')
        xref = self.root.find('.//xref[@target="%s"]'%anchor) if anchor else None
        if xref != None:
            # render only literal here
            text = x.text
        span = add.span(h, None, text, classes="unicode", id=anchor)
        span.tail = x.tail
        return span

    # 9.64.  <uri>
    # 
    # This element is rendered as an HTML <div> containing the string
    # "URI:" and an HTML <a> element with the "href" attribute set to the
    # linked URI, CSS class of "url" (note that the value is "url", not
    # "uri" as one might expect), and the contents set to the linked URI.
    # 
    # <div>URI:
    #   <a href="http://www.example.com"
    #      class="url">http://www.example.com</a>
    # </div>
    def render_uri(self, h, x):
        # Empty <uri> elements produce no output.
        if not x.text:
            return None
        value = x.text.strip()
        cls = 'url'
        div = add.div(h, None,
            build.span("URI:"), '\n',
            build.a(value, href=value, classes=cls),
            classes=cls,
        )
        return div

    # 9.65.  <workgroup>
    # 
    # This element does not add any direct output to HTML.
    render_workgroup = null_renderer # handled in render_rfc, when rendering the page top for drafts

    # 9.66.  <xref>
    # 
    # This element is rendered as an HTML <a> element containing an
    # appropriate local link as the "href" attribute.  The value of the
    # "href" attribute is taken from the "target" attribute, prepended by
    # "#".  The <a> element generated will have class "xref". 
The contentsNEWLINE # of the <a> element are the value of the "derivedContent" attribute.NEWLINE # If the "format" attribute has the value "default", and the "target"NEWLINE # attribute points to a <reference> or <referencegroup> element, thenNEWLINE # the generated <a> element is surrounded by square brackets in theNEWLINE # output.NEWLINE # NEWLINE # <a class="xref" href="#target">Table 2</a>NEWLINE # NEWLINE # orNEWLINE # NEWLINE # [<a class="xref" href="#RFC1234">RFC1234</a>]NEWLINE # NEWLINE def render_xref(self, h, x):NEWLINE # possible attributes:NEWLINE target = x.get('target')NEWLINE #pageno = x.get('pageno')NEWLINE #format = x.get('format')NEWLINE section = x.get('section')NEWLINE relative= x.get('relative')NEWLINE #sformat = x.get('sectionFormat')NEWLINE reftext = x.get('derivedContent', '')NEWLINE in_name = len(list(x.iterancestors('name'))) > 0NEWLINE if reftext is None:NEWLINE self.die(x, "Found an <%s> without derivedContent: %s" % (x.tag, lxml.etree.tostring(x),))NEWLINE if not (section or relative):NEWLINE # plain xrefNEWLINE if in_name:NEWLINE hh = build.em(reftext, classes="xref")NEWLINE else:NEWLINE if reftext:NEWLINE a = build.a(reftext, href='#%s'%target, classes='xref')NEWLINE if target in self.refname_mapping:NEWLINE if x.text and x.text.strip() and x.text.strip() != reftext:NEWLINE aa = build.a(x.text, href='#%s'%target, classes='xref')NEWLINE hh = build.span(aa, ' [', a, ']')NEWLINE else:NEWLINE hh = build.span('[', a, ']')NEWLINE else:NEWLINE if x.text and x.text.strip() and x.text.strip() != reftext:NEWLINE aa = build.a(x.text, href='#%s'%target, classes='xref')NEWLINE hh = build.span(aa, ' (', a, ')')NEWLINE else:NEWLINE hh = aNEWLINE else:NEWLINE a = build.a(x.text or '', href='#%s'%target, classes='xref')NEWLINE hh = aNEWLINE hh.tail = x.tailNEWLINE h.append(hh)NEWLINE return hhNEWLINE else:NEWLINE label = 'Section' if section[0].isdigit() else 'Appendix'NEWLINE link = x.get('derivedLink')NEWLINE format = x.get('sectionFormat')NEWLINE 
# 9.44.1. displayFormat='of'NEWLINE # NEWLINE # The output is an <a class='relref'> HTML tag, with contents ofNEWLINE # "Section " and the value of the "section" attribute. This isNEWLINE # followed by the word "of" (surrounded by whitespace). This isNEWLINE # followed by the <a class='xref'> HTML tag (surrounded by squareNEWLINE # brackets).NEWLINE # NEWLINE # For example, with an input of:NEWLINE # NEWLINE # See <relref section="2.3" target="RFC9999" displayFormat="of"NEWLINE # derivedLink="http://www.rfc-editor.org/info/rfc9999#s-2.3"/>NEWLINE # for an overview.NEWLINE # NEWLINE # The HTML generated will be:NEWLINE # NEWLINE # See <a class="relref"NEWLINE # href="http://www.rfc-editor.org/info/rfc9999#s-2.3">SectionNEWLINE # 2.3</a> of [<a class="xref" href="#RFC9999">RFC9999</a>]NEWLINE # for an overview.NEWLINE if format == 'of':NEWLINE span = add.span(h, None,NEWLINE build.a('%s %s'%(label, section), href=link, classes='relref'),NEWLINE ' of [',NEWLINE build.a(reftext, href='#%s'%target, classes='xref'),NEWLINE ']',NEWLINE )NEWLINE return spanNEWLINENEWLINE # 9.44.2. 
displayFormat='comma'NEWLINE # NEWLINE # The output is an <a class='xref'> HTML tag (wrapped by squareNEWLINE # brackets), followed by a comma (","), followed by whitespace,NEWLINE # followed by an <a class='relref'> HTML tag, with contents ofNEWLINE # "Section " and the value of the "section" attribute.NEWLINE # NEWLINE # For example, with an input of:NEWLINE # NEWLINE # See <relref section="2.3" target="RFC9999" displayFormat="comma"NEWLINE # derivedLink="http://www.rfc-editor.org/info/rfc9999#s-2.3"/>,NEWLINE # for an overview.NEWLINE # NEWLINE # The HTML generated will be:NEWLINE # NEWLINE # See [<a class="xref" href="#RFC9999">RFC9999</a>], <a class="relref"NEWLINE # href="http://www.rfc-editor.org/info/rfc9999#s-2.3">Section 2.3</a>,NEWLINE # for an overview.NEWLINE elif format == 'comma':NEWLINE span = add.span(h, None,NEWLINE '[',NEWLINE build.a(reftext, href='#%s'%target, classes='xref'),NEWLINE '], ',NEWLINE build.a('%s %s'%(label, section), href=link, classes='relref'),NEWLINE )NEWLINE return spanNEWLINENEWLINENEWLINE # 9.44.3. displayFormat='parens'NEWLINE # NEWLINE # The output is an <a> element with "href" attribute whose value is theNEWLINE # value of the "target" attribute prepended by "#", and whose contentNEWLINE # is the value of the "target" attribute; the entire element is wrappedNEWLINE # in square brackets. This is followed by whitespace. 
This isNEWLINE # followed by an <a> element whose "href" attribute is the value of theNEWLINE # "derivedLink" attribute and whose content is the value of theNEWLINE # "derivedRemoteContent" attribute; the entire element is wrapped inNEWLINE # parentheses.NEWLINE # NEWLINE # For example, if Section 2.3 of RFC 9999 has the title "ProtocolNEWLINE # Overview", for an input of:NEWLINE # NEWLINE # See <relref section="2.3" target="RFC9999" displayFormat="parens"NEWLINE # derivedLink="http://www.rfc-editor.org/info/rfc9999#s-2.3"NEWLINE # derivedRemoteContent="Section 2.3"/> for an overview.NEWLINE # NEWLINE # The HTML generated will be:NEWLINE # NEWLINE # See [<a class="relref" href="#RFC9999">RFC9999</a>]NEWLINE # (<a class="relref"NEWLINE # href="http://www.rfc-editor.org/info/rfc9999#s-2.3">SectionNEWLINE # 2.3</a>) for an overview.NEWLINE elif format == 'parens':NEWLINE span = add.span(h, None,NEWLINE '[',NEWLINE build.a(reftext, href='#%s'%target, classes='xref'),NEWLINE '] (',NEWLINE build.a('%s %s'%(label, section), href=link, classes='relref'),NEWLINE ')',NEWLINE )NEWLINE return spanNEWLINENEWLINE # NEWLINE # 9.44.4. 
displayFormat='bare'NEWLINE # NEWLINE # The output is an <a> element whose "href" attribute is the value ofNEWLINE # the "derivedLink" attribute and whose content is the value of theNEWLINE # "derivedRemoteContent" attribute.NEWLINE # NEWLINE # For this input:NEWLINE # NEWLINE # See <relref section="2.3" target="RFC9999" displayFormat="bare"NEWLINE # derivedLink="http://www.rfc-editor.org/info/rfc9999#s-2.3"NEWLINE # derivedRemoteContent="Section 2.3"/> and ...NEWLINE # NEWLINE # The HTML generated will be:NEWLINE # NEWLINE # See <a class="relref"NEWLINE # href="http://www.rfc-editor.org/info/rfc9999#s-2.3">SectionNEWLINE # 2.3</a> and ...NEWLINE elif format == 'bare':NEWLINE span = add.span(h, None,NEWLINE build.a('%s %s'%(label, section), href=link, classes='relref'),NEWLINE )NEWLINE return spanNEWLINE else:NEWLINE self.err(x, 'Unexpected value combination: section: %s relative: %s format: %s' %(section, relative, format))NEWLINENEWLINENEWLINE # --------------------------------------------------------------------------NEWLINE # Post processingNEWLINE def post_process(self, h):NEWLINE for x in h.iter():NEWLINE if x.text and x.text.strip() and '\u2028' in x.text:NEWLINE parts = x.text.split('\u2028')NEWLINE x.text = parts[0]NEWLINE for t in parts[1:]:NEWLINE br = build.br()NEWLINE br.tail = tNEWLINE x.append( br )NEWLINE if x.tail and x.tail.strip() and '\u2028' in x.tail:NEWLINE p = x.getparent()NEWLINE i = p.index(x)+1NEWLINE parts = x.tail.split('\u2028')NEWLINE x.tail = parts[0]NEWLINE for t in parts[1:]:NEWLINE br = build.br()NEWLINE br.tail = tNEWLINE p.insert(br, i)NEWLINE i += 1NEWLINE return hNEWLINENEWLINE # --- class variables ------------------------------------------------------NEWLINENEWLINE element_tags = [NEWLINE 'abstract',NEWLINE 'address',NEWLINE 'annotation',NEWLINE 'artset',NEWLINE 'artwork',NEWLINE 'aside',NEWLINE 'author',NEWLINE 'back',NEWLINE 'bcp14',NEWLINE 'blockquote',NEWLINE 'boilerplate',NEWLINE 'br',NEWLINE 'city',NEWLINE 
'code',NEWLINE 'country',NEWLINE 'cref',NEWLINE 'date',NEWLINE 'dd',NEWLINE 'displayreference',NEWLINE 'dl',NEWLINE 'dt',NEWLINE 'em',NEWLINE 'email',NEWLINE 'eref',NEWLINE 'figure',NEWLINE 'front',NEWLINE 'iref',NEWLINE 'li',NEWLINE 'link',NEWLINE 'middle',NEWLINE 'name',NEWLINE 'note',NEWLINE 'ol',NEWLINE 'organization',NEWLINE 'phone',NEWLINE 'postal',NEWLINE 'postalLine',NEWLINE 'refcontent',NEWLINE 'reference',NEWLINE 'referencegroup',NEWLINE 'references',NEWLINE 'region',NEWLINE 'relref',NEWLINE 'rfc',NEWLINE 'section',NEWLINE 'seriesInfo',NEWLINE 'sourcecode',NEWLINE 'street',NEWLINE 'strong',NEWLINE 'sub',NEWLINE 'sup',NEWLINE 't',NEWLINE 'table',NEWLINE 'tbody',NEWLINE 'td',NEWLINE 'tfoot',NEWLINE 'th',NEWLINE 'thead',NEWLINE 'title',NEWLINE 'tr',NEWLINE 'tt',NEWLINE 'ul',NEWLINE 'uri',NEWLINE 'xref',NEWLINE ]NEWLINE deprecated_element_tags = [NEWLINE 'list',NEWLINE 'spanx',NEWLINE 'vspace',NEWLINE 'c',NEWLINE 'texttable',NEWLINE 'ttcol',NEWLINE 'facsimile',NEWLINE 'format',NEWLINE 'preamble',NEWLINE 'postamble',NEWLINE ]NEWLINE unused_front_element_renderers = [NEWLINE 'area',NEWLINE 'keyword',NEWLINE 'workgroup',NEWLINE ]NEWLINE all_element_tags = element_tags + deprecated_element_tags + unused_front_element_renderersNEWLINE deprecated_attributes = [NEWLINE # element, attrbuteNEWLINE ('figure', 'align'),NEWLINE ('section', 'title'),NEWLINE ('note', 'title'),NEWLINE ('figure', 'title'),NEWLINE ('references', 'title'),NEWLINE ('texttable', 'title'),NEWLINE ('figure', 'src'),NEWLINE ('artwork', 'xml:space'),NEWLINE ('artwork', 'height'),NEWLINE ('artwork', 'width'),NEWLINE ('figure', 'height'),NEWLINE ('figure', 'width'),NEWLINE ('xref', 'pageno'),NEWLINE ]NEWLINE
# Generated by Django 3.0.3 on 2020-03-11 12:12

from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):
    """Replace the generic UserRelationship model with three dedicated
    models -- UserFollows, UserFollowRequest and UserBlocks -- and expose
    them on the user model through many-to-many fields.

    NOTE: the operations list is order-sensitive declarative data; do not
    reorder or restyle it.
    """

    dependencies = [
        ('fedireads', '0014_status_remote_id'),
    ]

    operations = [
        # Three structurally identical through-models, one per relationship
        # kind.  Each carries timestamps plus an ActivityPub relationship id.
        migrations.CreateModel(
            name='UserBlocks',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('updated_date', models.DateTimeField(auto_now=True)),
                ('relationship_id', models.CharField(max_length=100)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='UserFollowRequest',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('updated_date', models.DateTimeField(auto_now=True)),
                ('relationship_id', models.CharField(max_length=100)),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='UserFollows',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created_date', models.DateTimeField(auto_now_add=True)),
                ('updated_date', models.DateTimeField(auto_now=True)),
                ('relationship_id', models.CharField(max_length=100)),
            ],
            options={
                'abstract': False,
            },
        ),
        # Drop the old single-model representation.
        migrations.RemoveField(
            model_name='user',
            name='followers',
        ),
        migrations.DeleteModel(
            name='UserRelationship',
        ),
        # Subject/object FK pairs for each through-model.  PROTECT prevents
        # deleting a user that still participates in a relationship row.
        migrations.AddField(
            model_name='userfollows',
            name='user_object',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='userfollows_user_object', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='userfollows',
            name='user_subject',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='userfollows_user_subject', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='userfollowrequest',
            name='user_object',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='userfollowrequest_user_object', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='userfollowrequest',
            name='user_subject',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='userfollowrequest_user_subject', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='userblocks',
            name='user_object',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='userblocks_user_object', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='userblocks',
            name='user_subject',
            field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='userblocks_user_subject', to=settings.AUTH_USER_MODEL),
        ),
        # Many-to-many accessors on the user model, routed through the new
        # through-models defined above.
        migrations.AddField(
            model_name='user',
            name='blocks',
            field=models.ManyToManyField(related_name='blocked_by', through='fedireads.UserBlocks', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='user',
            name='follow_requests',
            field=models.ManyToManyField(related_name='follower_requests', through='fedireads.UserFollowRequest', to=settings.AUTH_USER_MODEL),
        ),
        migrations.AddField(
            model_name='user',
            name='following',
            field=models.ManyToManyField(related_name='followers', through='fedireads.UserFollows', to=settings.AUTH_USER_MODEL),
        ),
        # Each (subject, object) pair may appear at most once per model.
        migrations.AddConstraint(
            model_name='userfollows',
            constraint=models.UniqueConstraint(fields=('user_subject', 'user_object'), name='userfollows_unique'),
        ),
        migrations.AddConstraint(
            model_name='userfollowrequest',
            constraint=models.UniqueConstraint(fields=('user_subject', 'user_object'), name='userfollowrequest_unique'),
        ),
        migrations.AddConstraint(
            model_name='userblocks',
            constraint=models.UniqueConstraint(fields=('user_subject', 'user_object'), name='userblocks_unique'),
        ),
    ]
from py_asciimath import PROJECT_ROOT
from py_asciimath.translator.translator import (
    ASCIIMath2MathML,
    ASCIIMath2Tex,
    MathML2Tex,
)


if __name__ == "__main__":
    # Demonstrates the three translators shipped with py_asciimath,
    # reading the bundled example files.
    examples_dir = PROJECT_ROOT + "/../examples"

    # ASCIIMath -> MathML, validated against the MathML 2 DTD
    # (dtd_validation=True, fetched with network=True), emitted as a
    # pretty-printed XML string with an XML declaration.
    print("ASCIIMath to MathML")
    to_mathml = ASCIIMath2MathML(log=False, inplace=True)
    result = to_mathml.translate(
        examples_dir + "/asciimath_exp.txt",
        displaystyle=True,
        dtd="mathml2",
        dtd_validation=True,
        from_file=True,
        output="string",
        network=True,
        pprint=False,
        to_file=None,
        xml_declaration=True,
        xml_pprint=True,
    )

    # ASCIIMath -> LaTeX from the same example file.
    print(result, "\n\nASCIIMath to LaTeX")
    to_tex = ASCIIMath2Tex(log=False, inplace=True)
    result = to_tex.translate(
        examples_dir + "/asciimath_exp.txt",
        displaystyle=True,
        from_file=True,
        pprint=False,
        to_file=None,
    )

    # MathML -> LaTeX, with network access disabled (network=False).
    print(result, "\n\nMathML to LaTeX")
    from_mathml = MathML2Tex()
    result = from_mathml.translate(
        examples_dir + "/mathml_exp.xml",
        from_file=True,
        network=False,
        to_file=None,
    )
    print(result)
#!/usr/bin/env pythonNEWLINENEWLINE'''NEWLINEEC2 external inventory scriptNEWLINE=================================NEWLINENEWLINEGenerates inventory that Ansible can understand by making API request toNEWLINEAWS EC2 using the Boto library.NEWLINENEWLINENOTE: This script assumes Ansible is being executed where the environmentNEWLINEvariables needed for Boto have already been set:NEWLINE export AWS_ACCESS_KEY_ID='AK123'NEWLINE export AWS_SECRET_ACCESS_KEY='abc123'NEWLINENEWLINEoptional region environment variable if region is 'auto'NEWLINENEWLINEThis script also assumes there is an ec2.ini file alongside it. To specify aNEWLINEdifferent path to ec2.ini, define the EC2_INI_PATH environment variable:NEWLINENEWLINE export EC2_INI_PATH=/path/to/my_ec2.iniNEWLINENEWLINEIf you're using eucalyptus you need to set the above variables andNEWLINEyou need to define:NEWLINENEWLINE export EC2_URL=http://hostname_of_your_cc:port/services/EucalyptusNEWLINENEWLINEIf you're using boto profiles (requires boto>=2.24.0) you can choose a profileNEWLINEusing the --boto-profile command line argument (e.g. 
ec2.py --boto-profile prod) or usingNEWLINEthe AWS_PROFILE variable:NEWLINENEWLINE AWS_PROFILE=prod ansible-playbook -i ec2.py myplaybook.ymlNEWLINENEWLINEFor more details, see: http://docs.pythonboto.org/en/latest/boto_config_tut.htmlNEWLINENEWLINEWhen run against a specific host, this script returns the following variables:NEWLINE - ec2_ami_launch_indexNEWLINE - ec2_architectureNEWLINE - ec2_associationNEWLINE - ec2_attachTimeNEWLINE - ec2_attachmentNEWLINE - ec2_attachmentIdNEWLINE - ec2_block_devicesNEWLINE - ec2_client_tokenNEWLINE - ec2_deleteOnTerminationNEWLINE - ec2_descriptionNEWLINE - ec2_deviceIndexNEWLINE - ec2_dns_nameNEWLINE - ec2_eventsSetNEWLINE - ec2_group_nameNEWLINE - ec2_hypervisorNEWLINE - ec2_idNEWLINE - ec2_image_idNEWLINE - ec2_instanceStateNEWLINE - ec2_instance_typeNEWLINE - ec2_ipOwnerIdNEWLINE - ec2_ip_addressNEWLINE - ec2_itemNEWLINE - ec2_kernelNEWLINE - ec2_key_nameNEWLINE - ec2_launch_timeNEWLINE - ec2_monitoredNEWLINE - ec2_monitoringNEWLINE - ec2_networkInterfaceIdNEWLINE - ec2_ownerIdNEWLINE - ec2_persistentNEWLINE - ec2_placementNEWLINE - ec2_platformNEWLINE - ec2_previous_stateNEWLINE - ec2_private_dns_nameNEWLINE - ec2_private_ip_addressNEWLINE - ec2_publicIpNEWLINE - ec2_public_dns_nameNEWLINE - ec2_ramdiskNEWLINE - ec2_reasonNEWLINE - ec2_regionNEWLINE - ec2_requester_idNEWLINE - ec2_root_device_nameNEWLINE - ec2_root_device_typeNEWLINE - ec2_security_group_idsNEWLINE - ec2_security_group_namesNEWLINE - ec2_shutdown_stateNEWLINE - ec2_sourceDestCheckNEWLINE - ec2_spot_instance_request_idNEWLINE - ec2_stateNEWLINE - ec2_state_codeNEWLINE - ec2_state_reasonNEWLINE - ec2_statusNEWLINE - ec2_subnet_idNEWLINE - ec2_tenancyNEWLINE - ec2_virtualization_typeNEWLINE - ec2_vpc_idNEWLINENEWLINEThese variables are pulled out of a boto.ec2.instance object. There is a lack ofNEWLINEconsistency with variable spellings (camelCase and underscores) since thisNEWLINEjust loops through all variables the object exposes. 
It is preferred to use theNEWLINEones with underscores when multiple exist.NEWLINENEWLINEIn addition, if an instance has AWS Tags associated with it, each tag is a newNEWLINEvariable named:NEWLINE - ec2_tag_[Key] = [Value]NEWLINENEWLINESecurity groups are comma-separated in 'ec2_security_group_ids' andNEWLINE'ec2_security_group_names'.NEWLINE'''NEWLINENEWLINE# (c) 2012, Peter SankauskasNEWLINE#NEWLINE# This file is part of Ansible,NEWLINE#NEWLINE# Ansible is free software: you can redistribute it and/or modifyNEWLINE# it under the terms of the GNU General Public License as published byNEWLINE# the Free Software Foundation, either version 3 of the License, orNEWLINE# (at your option) any later version.NEWLINE#NEWLINE# Ansible is distributed in the hope that it will be useful,NEWLINE# but WITHOUT ANY WARRANTY; without even the implied warranty ofNEWLINE# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See theNEWLINE# GNU General Public License for more details.NEWLINE#NEWLINE# You should have received a copy of the GNU General Public LicenseNEWLINE# along with Ansible. 
If not, see <http://www.gnu.org/licenses/>.NEWLINENEWLINE######################################################################NEWLINENEWLINEimport sysNEWLINEimport osNEWLINEimport argparseNEWLINEimport reNEWLINEfrom time import timeNEWLINEimport botoNEWLINEfrom boto import ec2NEWLINEfrom boto import rdsNEWLINEfrom boto import elasticacheNEWLINEfrom boto import route53NEWLINEfrom boto import stsNEWLINEimport sixNEWLINENEWLINEfrom ansible.module_utils import ec2 as ec2_utilsNEWLINENEWLINEHAS_BOTO3 = FalseNEWLINEtry:NEWLINE import boto3NEWLINE HAS_BOTO3 = TrueNEWLINEexcept ImportError:NEWLINE passNEWLINENEWLINEfrom six.moves import configparserNEWLINEfrom collections import defaultdictNEWLINENEWLINEtry:NEWLINE import jsonNEWLINEexcept ImportError:NEWLINE import simplejson as jsonNEWLINENEWLINENEWLINEclass Ec2Inventory(object):NEWLINENEWLINE def _empty_inventory(self):NEWLINE return {"_meta": {"hostvars": {}}}NEWLINENEWLINE def __init__(self):NEWLINE ''' Main execution path '''NEWLINENEWLINE # Inventory grouped by instance IDs, tags, security groups, regions,NEWLINE # and availability zonesNEWLINE self.inventory = self._empty_inventory()NEWLINENEWLINE self.aws_account_id = NoneNEWLINENEWLINE # Index of hostname (address) to instance IDNEWLINE self.index = {}NEWLINENEWLINE # Boto profile to use (if any)NEWLINE self.boto_profile = NoneNEWLINENEWLINE # AWS credentials.NEWLINE self.credentials = {}NEWLINENEWLINE # Read settings and parse CLI argumentsNEWLINE self.parse_cli_args()NEWLINE self.read_settings()NEWLINENEWLINE # Make sure that profile_name is not passed at all if not setNEWLINE # as pre 2.24 boto will fall over otherwiseNEWLINE if self.boto_profile:NEWLINE if not hasattr(boto.ec2.EC2Connection, 'profile_name'):NEWLINE self.fail_with_error("boto version must be >= 2.24 to use profile")NEWLINENEWLINE # CacheNEWLINE if self.args.refresh_cache:NEWLINE self.do_api_calls_update_cache()NEWLINE elif not self.is_cache_valid():NEWLINE 
self.do_api_calls_update_cache()NEWLINENEWLINE # Data to printNEWLINE if self.args.host:NEWLINE data_to_print = self.get_host_info()NEWLINENEWLINE elif self.args.list:NEWLINE # Display list of instances for inventoryNEWLINE if self.inventory == self._empty_inventory():NEWLINE data_to_print = self.get_inventory_from_cache()NEWLINE else:NEWLINE data_to_print = self.json_format_dict(self.inventory, True)NEWLINENEWLINE print(data_to_print)NEWLINENEWLINE def is_cache_valid(self):NEWLINE ''' Determines if the cache files have expired, or if it is still valid '''NEWLINENEWLINE if os.path.isfile(self.cache_path_cache):NEWLINE mod_time = os.path.getmtime(self.cache_path_cache)NEWLINE current_time = time()NEWLINE if (mod_time + self.cache_max_age) > current_time:NEWLINE if os.path.isfile(self.cache_path_index):NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINE def read_settings(self):NEWLINE ''' Reads the settings from the ec2.ini file '''NEWLINENEWLINE scriptbasename = __file__NEWLINE scriptbasename = os.path.basename(scriptbasename)NEWLINE scriptbasename = scriptbasename.replace('.py', '')NEWLINENEWLINE defaults = {NEWLINE 'ec2': {NEWLINE 'ini_fallback': os.path.join(os.path.dirname(__file__), 'ec2.ini'),NEWLINE 'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename)NEWLINE }NEWLINE }NEWLINENEWLINE if six.PY3:NEWLINE config = configparser.ConfigParser()NEWLINE else:NEWLINE config = configparser.SafeConfigParser()NEWLINE ec2_ini_path = os.environ.get('EC2_INI_PATH', defaults['ec2']['ini_path'])NEWLINE ec2_ini_path = os.path.expanduser(os.path.expandvars(ec2_ini_path))NEWLINENEWLINE if not os.path.isfile(ec2_ini_path):NEWLINE ec2_ini_path = os.path.expanduser(defaults['ec2']['ini_fallback'])NEWLINENEWLINE config.read(ec2_ini_path)NEWLINENEWLINE # is eucalyptus?NEWLINE self.eucalyptus_host = NoneNEWLINE self.eucalyptus = FalseNEWLINE if config.has_option('ec2', 'eucalyptus'):NEWLINE self.eucalyptus = config.getboolean('ec2', 
'eucalyptus')NEWLINE if self.eucalyptus and config.has_option('ec2', 'eucalyptus_host'):NEWLINE self.eucalyptus_host = config.get('ec2', 'eucalyptus_host')NEWLINENEWLINE # RegionsNEWLINE self.regions = []NEWLINE configRegions = config.get('ec2', 'regions')NEWLINE if (configRegions == 'all'):NEWLINE if self.eucalyptus_host:NEWLINE self.regions.append(boto.connect_euca(host=self.eucalyptus_host).region.name, **self.credentials)NEWLINE else:NEWLINE configRegions_exclude = config.get('ec2', 'regions_exclude')NEWLINE for regionInfo in ec2.regions():NEWLINE if regionInfo.name not in configRegions_exclude:NEWLINE self.regions.append(regionInfo.name)NEWLINE else:NEWLINE self.regions = configRegions.split(",")NEWLINE if 'auto' in self.regions:NEWLINE env_region = os.environ.get('AWS_REGION')NEWLINE if env_region is None:NEWLINE env_region = os.environ.get('AWS_DEFAULT_REGION')NEWLINE self.regions = [env_region]NEWLINENEWLINE # Destination addressesNEWLINE self.destination_variable = config.get('ec2', 'destination_variable')NEWLINE self.vpc_destination_variable = config.get('ec2', 'vpc_destination_variable')NEWLINENEWLINE if config.has_option('ec2', 'hostname_variable'):NEWLINE self.hostname_variable = config.get('ec2', 'hostname_variable')NEWLINE else:NEWLINE self.hostname_variable = NoneNEWLINENEWLINE if config.has_option('ec2', 'destination_format') and \NEWLINE config.has_option('ec2', 'destination_format_tags'):NEWLINE self.destination_format = config.get('ec2', 'destination_format')NEWLINE self.destination_format_tags = config.get('ec2', 'destination_format_tags').split(',')NEWLINE else:NEWLINE self.destination_format = NoneNEWLINE self.destination_format_tags = NoneNEWLINENEWLINE # Route53NEWLINE self.route53_enabled = config.getboolean('ec2', 'route53')NEWLINE if config.has_option('ec2', 'route53_hostnames'):NEWLINE self.route53_hostnames = config.get('ec2', 'route53_hostnames')NEWLINE else:NEWLINE self.route53_hostnames = NoneNEWLINE self.route53_excluded_zones = 
[]NEWLINE if config.has_option('ec2', 'route53_excluded_zones'):NEWLINE self.route53_excluded_zones.extend(NEWLINE config.get('ec2', 'route53_excluded_zones', '').split(','))NEWLINENEWLINE # Include RDS instances?NEWLINE self.rds_enabled = TrueNEWLINE if config.has_option('ec2', 'rds'):NEWLINE self.rds_enabled = config.getboolean('ec2', 'rds')NEWLINENEWLINE # Include RDS cluster instances?NEWLINE if config.has_option('ec2', 'include_rds_clusters'):NEWLINE self.include_rds_clusters = config.getboolean('ec2', 'include_rds_clusters')NEWLINE else:NEWLINE self.include_rds_clusters = FalseNEWLINENEWLINE # Include ElastiCache instances?NEWLINE self.elasticache_enabled = TrueNEWLINE if config.has_option('ec2', 'elasticache'):NEWLINE self.elasticache_enabled = config.getboolean('ec2', 'elasticache')NEWLINENEWLINE # Return all EC2 instances?NEWLINE if config.has_option('ec2', 'all_instances'):NEWLINE self.all_instances = config.getboolean('ec2', 'all_instances')NEWLINE else:NEWLINE self.all_instances = FalseNEWLINENEWLINE # Instance states to be gathered in inventory. Default is 'running'.NEWLINE # Setting 'all_instances' to 'yes' overrides this option.NEWLINE ec2_valid_instance_states = [NEWLINE 'pending',NEWLINE 'running',NEWLINE 'shutting-down',NEWLINE 'terminated',NEWLINE 'stopping',NEWLINE 'stopped'NEWLINE ]NEWLINE self.ec2_instance_states = []NEWLINE if self.all_instances:NEWLINE self.ec2_instance_states = ec2_valid_instance_statesNEWLINE elif config.has_option('ec2', 'instance_states'):NEWLINE for instance_state in config.get('ec2', 'instance_states').split(','):NEWLINE instance_state = instance_state.strip()NEWLINE if instance_state not in ec2_valid_instance_states:NEWLINE continueNEWLINE self.ec2_instance_states.append(instance_state)NEWLINE else:NEWLINE self.ec2_instance_states = ['running']NEWLINENEWLINE # Return all RDS instances? 
(if RDS is enabled)NEWLINE if config.has_option('ec2', 'all_rds_instances') and self.rds_enabled:NEWLINE self.all_rds_instances = config.getboolean('ec2', 'all_rds_instances')NEWLINE else:NEWLINE self.all_rds_instances = FalseNEWLINENEWLINE # Return all ElastiCache replication groups? (if ElastiCache is enabled)NEWLINE if config.has_option('ec2', 'all_elasticache_replication_groups') and self.elasticache_enabled:NEWLINE self.all_elasticache_replication_groups = config.getboolean('ec2', 'all_elasticache_replication_groups')NEWLINE else:NEWLINE self.all_elasticache_replication_groups = FalseNEWLINENEWLINE # Return all ElastiCache clusters? (if ElastiCache is enabled)NEWLINE if config.has_option('ec2', 'all_elasticache_clusters') and self.elasticache_enabled:NEWLINE self.all_elasticache_clusters = config.getboolean('ec2', 'all_elasticache_clusters')NEWLINE else:NEWLINE self.all_elasticache_clusters = FalseNEWLINENEWLINE # Return all ElastiCache nodes? (if ElastiCache is enabled)NEWLINE if config.has_option('ec2', 'all_elasticache_nodes') and self.elasticache_enabled:NEWLINE self.all_elasticache_nodes = config.getboolean('ec2', 'all_elasticache_nodes')NEWLINE else:NEWLINE self.all_elasticache_nodes = FalseNEWLINENEWLINE # boto configuration profile (prefer CLI argument then environment variables then config file)NEWLINE self.boto_profile = self.args.boto_profile or os.environ.get('AWS_PROFILE')NEWLINE if config.has_option('ec2', 'boto_profile') and not self.boto_profile:NEWLINE self.boto_profile = config.get('ec2', 'boto_profile')NEWLINENEWLINE # AWS credentials (prefer environment variables)NEWLINE if not (self.boto_profile or os.environ.get('AWS_ACCESS_KEY_ID') orNEWLINE os.environ.get('AWS_PROFILE')):NEWLINE if config.has_option('credentials', 'aws_access_key_id'):NEWLINE aws_access_key_id = config.get('credentials', 'aws_access_key_id')NEWLINE else:NEWLINE aws_access_key_id = NoneNEWLINE if config.has_option('credentials', 'aws_secret_access_key'):NEWLINE 
aws_secret_access_key = config.get('credentials', 'aws_secret_access_key')NEWLINE else:NEWLINE aws_secret_access_key = NoneNEWLINE if config.has_option('credentials', 'aws_security_token'):NEWLINE aws_security_token = config.get('credentials', 'aws_security_token')NEWLINE else:NEWLINE aws_security_token = NoneNEWLINE if aws_access_key_id:NEWLINE self.credentials = {NEWLINE 'aws_access_key_id': aws_access_key_id,NEWLINE 'aws_secret_access_key': aws_secret_access_keyNEWLINE }NEWLINE if aws_security_token:NEWLINE self.credentials['security_token'] = aws_security_tokenNEWLINENEWLINE # Cache relatedNEWLINE cache_dir = os.path.expanduser(config.get('ec2', 'cache_path'))NEWLINE if self.boto_profile:NEWLINE cache_dir = os.path.join(cache_dir, 'profile_' + self.boto_profile)NEWLINE if not os.path.exists(cache_dir):NEWLINE os.makedirs(cache_dir)NEWLINENEWLINE cache_name = 'ansible-ec2'NEWLINE cache_id = self.boto_profile or os.environ.get('AWS_ACCESS_KEY_ID', self.credentials.get('aws_access_key_id'))NEWLINE if cache_id:NEWLINE cache_name = '%s-%s' % (cache_name, cache_id)NEWLINE self.cache_path_cache = os.path.join(cache_dir, "%s.cache" % cache_name)NEWLINE self.cache_path_index = os.path.join(cache_dir, "%s.index" % cache_name)NEWLINE self.cache_max_age = config.getint('ec2', 'cache_max_age')NEWLINENEWLINE if config.has_option('ec2', 'expand_csv_tags'):NEWLINE self.expand_csv_tags = config.getboolean('ec2', 'expand_csv_tags')NEWLINE else:NEWLINE self.expand_csv_tags = FalseNEWLINENEWLINE # Configure nested groups instead of flat namespace.NEWLINE if config.has_option('ec2', 'nested_groups'):NEWLINE self.nested_groups = config.getboolean('ec2', 'nested_groups')NEWLINE else:NEWLINE self.nested_groups = FalseNEWLINENEWLINE # Replace dash or not in group namesNEWLINE if config.has_option('ec2', 'replace_dash_in_groups'):NEWLINE self.replace_dash_in_groups = config.getboolean('ec2', 'replace_dash_in_groups')NEWLINE else:NEWLINE self.replace_dash_in_groups = TrueNEWLINENEWLINE # 
IAM role to assume for connectionNEWLINE if config.has_option('ec2', 'iam_role'):NEWLINE self.iam_role = config.get('ec2', 'iam_role')NEWLINE else:NEWLINE self.iam_role = NoneNEWLINENEWLINE # Configure which groups should be created.NEWLINE group_by_options = [NEWLINE 'group_by_instance_id',NEWLINE 'group_by_region',NEWLINE 'group_by_availability_zone',NEWLINE 'group_by_ami_id',NEWLINE 'group_by_instance_type',NEWLINE 'group_by_instance_state',NEWLINE 'group_by_key_pair',NEWLINE 'group_by_vpc_id',NEWLINE 'group_by_security_group',NEWLINE 'group_by_tag_keys',NEWLINE 'group_by_tag_none',NEWLINE 'group_by_route53_names',NEWLINE 'group_by_rds_engine',NEWLINE 'group_by_rds_parameter_group',NEWLINE 'group_by_elasticache_engine',NEWLINE 'group_by_elasticache_cluster',NEWLINE 'group_by_elasticache_parameter_group',NEWLINE 'group_by_elasticache_replication_group',NEWLINE 'group_by_aws_account',NEWLINE ]NEWLINE for option in group_by_options:NEWLINE if config.has_option('ec2', option):NEWLINE setattr(self, option, config.getboolean('ec2', option))NEWLINE else:NEWLINE setattr(self, option, True)NEWLINENEWLINE # Do we need to just include hosts that match a pattern?NEWLINE try:NEWLINE pattern_include = config.get('ec2', 'pattern_include')NEWLINE if pattern_include and len(pattern_include) > 0:NEWLINE self.pattern_include = re.compile(pattern_include)NEWLINE else:NEWLINE self.pattern_include = NoneNEWLINE except configparser.NoOptionError:NEWLINE self.pattern_include = NoneNEWLINENEWLINE # Do we need to exclude hosts that match a pattern?NEWLINE try:NEWLINE pattern_exclude = config.get('ec2', 'pattern_exclude')NEWLINE if pattern_exclude and len(pattern_exclude) > 0:NEWLINE self.pattern_exclude = re.compile(pattern_exclude)NEWLINE else:NEWLINE self.pattern_exclude = NoneNEWLINE except configparser.NoOptionError:NEWLINE self.pattern_exclude = NoneNEWLINENEWLINE # Do we want to stack multiple filters?NEWLINE if config.has_option('ec2', 'stack_filters'):NEWLINE self.stack_filters = 
config.getboolean('ec2', 'stack_filters')NEWLINE else:NEWLINE self.stack_filters = FalseNEWLINENEWLINE # Instance filters (see boto and EC2 API docs). Ignore invalid filters.NEWLINE self.ec2_instance_filters = defaultdict(list)NEWLINE if config.has_option('ec2', 'instance_filters'):NEWLINENEWLINE filters = [f for f in config.get('ec2', 'instance_filters').split(',') if f]NEWLINENEWLINE for instance_filter in filters:NEWLINE instance_filter = instance_filter.strip()NEWLINE if not instance_filter or '=' not in instance_filter:NEWLINE continueNEWLINE filter_key, filter_value = [x.strip() for x in instance_filter.split('=', 1)]NEWLINE if not filter_key:NEWLINE continueNEWLINE self.ec2_instance_filters[filter_key].append(filter_value)NEWLINENEWLINE def parse_cli_args(self):NEWLINE ''' Command line argument processing '''NEWLINENEWLINE parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on EC2')NEWLINE parser.add_argument('--list', action='store_true', default=True,NEWLINE help='List instances (default: True)')NEWLINE parser.add_argument('--host', action='store',NEWLINE help='Get all the variables about a specific instance')NEWLINE parser.add_argument('--refresh-cache', action='store_true', default=False,NEWLINE help='Force refresh of cache by making API requests to EC2 (default: False - use cache files)')NEWLINE parser.add_argument('--profile', '--boto-profile', action='store', dest='boto_profile',NEWLINE help='Use boto profile for connections to EC2')NEWLINE self.args = parser.parse_args()NEWLINENEWLINE def do_api_calls_update_cache(self):NEWLINE ''' Do API calls to each region, and save data in cache files '''NEWLINENEWLINE if self.route53_enabled:NEWLINE self.get_route53_records()NEWLINENEWLINE for region in self.regions:NEWLINE self.get_instances_by_region(region)NEWLINE if self.rds_enabled:NEWLINE self.get_rds_instances_by_region(region)NEWLINE if self.elasticache_enabled:NEWLINE 
self.get_elasticache_clusters_by_region(region)NEWLINE self.get_elasticache_replication_groups_by_region(region)NEWLINE if self.include_rds_clusters:NEWLINE self.include_rds_clusters_by_region(region)NEWLINENEWLINE self.write_to_cache(self.inventory, self.cache_path_cache)NEWLINE self.write_to_cache(self.index, self.cache_path_index)NEWLINENEWLINE def connect(self, region):NEWLINE ''' create connection to api server'''NEWLINE if self.eucalyptus:NEWLINE conn = boto.connect_euca(host=self.eucalyptus_host, **self.credentials)NEWLINE conn.APIVersion = '2010-08-31'NEWLINE else:NEWLINE conn = self.connect_to_aws(ec2, region)NEWLINE return connNEWLINENEWLINE def boto_fix_security_token_in_profile(self, connect_args):NEWLINE ''' monkey patch for boto issue boto/boto#2100 '''NEWLINE profile = 'profile ' + self.boto_profileNEWLINE if boto.config.has_option(profile, 'aws_security_token'):NEWLINE connect_args['security_token'] = boto.config.get(profile, 'aws_security_token')NEWLINE return connect_argsNEWLINENEWLINE def connect_to_aws(self, module, region):NEWLINE connect_args = self.credentialsNEWLINENEWLINE # only pass the profile name if it's set (as it is not supported by older boto versions)NEWLINE if self.boto_profile:NEWLINE connect_args['profile_name'] = self.boto_profileNEWLINE self.boto_fix_security_token_in_profile(connect_args)NEWLINENEWLINE if self.iam_role:NEWLINE sts_conn = sts.connect_to_region(region, **connect_args)NEWLINE role = sts_conn.assume_role(self.iam_role, 'ansible_dynamic_inventory')NEWLINE connect_args['aws_access_key_id'] = role.credentials.access_keyNEWLINE connect_args['aws_secret_access_key'] = role.credentials.secret_keyNEWLINE connect_args['security_token'] = role.credentials.session_tokenNEWLINENEWLINE conn = module.connect_to_region(region, **connect_args)NEWLINE # connect_to_region will fail "silently" by returning None if the region name is wrong or not supportedNEWLINE if conn is None:NEWLINE self.fail_with_error("region name: %s likely 
not supported, or AWS is down. connection to region failed." % region)NEWLINE return connNEWLINENEWLINE def get_instances_by_region(self, region):NEWLINE ''' Makes an AWS EC2 API call to the list of instances in a particularNEWLINE region '''NEWLINENEWLINE try:NEWLINE conn = self.connect(region)NEWLINE reservations = []NEWLINE if self.ec2_instance_filters:NEWLINE if self.stack_filters:NEWLINE filters_dict = {}NEWLINE for filter_key, filter_values in self.ec2_instance_filters.items():NEWLINE filters_dict[filter_key] = filter_valuesNEWLINE reservations.extend(conn.get_all_instances(filters=filters_dict))NEWLINE else:NEWLINE for filter_key, filter_values in self.ec2_instance_filters.items():NEWLINE reservations.extend(conn.get_all_instances(filters={filter_key: filter_values}))NEWLINE else:NEWLINE reservations = conn.get_all_instances()NEWLINENEWLINE # Pull the tags back in a second stepNEWLINE # AWS are on record as saying that the tags fetched in the first `get_all_instances` request are notNEWLINE # reliable and may be missing, and the only way to guarantee they are there is by calling `get_all_tags`NEWLINE instance_ids = []NEWLINE for reservation in reservations:NEWLINE instance_ids.extend([instance.id for instance in reservation.instances])NEWLINENEWLINE max_filter_value = 199NEWLINE tags = []NEWLINE for i in range(0, len(instance_ids), max_filter_value):NEWLINE tags.extend(conn.get_all_tags(filters={'resource-type': 'instance', 'resource-id': instance_ids[i:i + max_filter_value]}))NEWLINENEWLINE tags_by_instance_id = defaultdict(dict)NEWLINE for tag in tags:NEWLINE tags_by_instance_id[tag.res_id][tag.name] = tag.valueNEWLINENEWLINE if (not self.aws_account_id) and reservations:NEWLINE self.aws_account_id = reservations[0].owner_idNEWLINENEWLINE for reservation in reservations:NEWLINE for instance in reservation.instances:NEWLINE instance.tags = tags_by_instance_id[instance.id]NEWLINE self.add_instance(instance, region)NEWLINENEWLINE except 
boto.exception.BotoServerError as e:NEWLINE if e.error_code == 'AuthFailure':NEWLINE error = self.get_auth_error_message()NEWLINE else:NEWLINE backend = 'Eucalyptus' if self.eucalyptus else 'AWS'NEWLINE error = "Error connecting to %s backend.\n%s" % (backend, e.message)NEWLINE self.fail_with_error(error, 'getting EC2 instances')NEWLINENEWLINE def get_rds_instances_by_region(self, region):NEWLINE ''' Makes an AWS API call to the list of RDS instances in a particularNEWLINE region '''NEWLINENEWLINE if not HAS_BOTO3:NEWLINE self.fail_with_error("Working with RDS instances requires boto3 - please install boto3 and try again",NEWLINE "getting RDS instances")NEWLINENEWLINE client = ec2_utils.boto3_inventory_conn('client', 'rds', region, **self.credentials)NEWLINE db_instances = client.describe_db_instances()NEWLINENEWLINE try:NEWLINE conn = self.connect_to_aws(rds, region)NEWLINE if conn:NEWLINE marker = NoneNEWLINE while True:NEWLINE instances = conn.get_all_dbinstances(marker=marker)NEWLINE marker = instances.markerNEWLINE for index, instance in enumerate(instances):NEWLINE # Add tags to instances.NEWLINE instance.arn = db_instances['DBInstances'][index]['DBInstanceArn']NEWLINE tags = client.list_tags_for_resource(ResourceName=instance.arn)['TagList']NEWLINE instance.tags = {}NEWLINE for tag in tags:NEWLINE instance.tags[tag['Key']] = tag['Value']NEWLINENEWLINE self.add_rds_instance(instance, region)NEWLINE if not marker:NEWLINE breakNEWLINE except boto.exception.BotoServerError as e:NEWLINE error = e.reasonNEWLINENEWLINE if e.error_code == 'AuthFailure':NEWLINE error = self.get_auth_error_message()NEWLINE elif e.error_code == "OptInRequired":NEWLINE error = "RDS hasn't been enabled for this account yet. 
" \NEWLINE "You must either log in to the RDS service through the AWS console to enable it, " \NEWLINE "or set 'rds = False' in ec2.ini"NEWLINE elif not e.reason == "Forbidden":NEWLINE error = "Looks like AWS RDS is down:\n%s" % e.messageNEWLINE self.fail_with_error(error, 'getting RDS instances')NEWLINENEWLINE def include_rds_clusters_by_region(self, region):NEWLINE if not HAS_BOTO3:NEWLINE self.fail_with_error("Working with RDS clusters requires boto3 - please install boto3 and try again",NEWLINE "getting RDS clusters")NEWLINENEWLINE client = ec2_utils.boto3_inventory_conn('client', 'rds', region, **self.credentials)NEWLINENEWLINE marker, clusters = '', []NEWLINE while marker is not None:NEWLINE resp = client.describe_db_clusters(Marker=marker)NEWLINE clusters.extend(resp["DBClusters"])NEWLINE marker = resp.get('Marker', None)NEWLINENEWLINE account_id = boto.connect_iam().get_user().arn.split(':')[4]NEWLINE c_dict = {}NEWLINE for c in clusters:NEWLINE # remove these datetime objects as there is no serialisation to jsonNEWLINE # currently in place and we don't need the data yetNEWLINE if 'EarliestRestorableTime' in c:NEWLINE del c['EarliestRestorableTime']NEWLINE if 'LatestRestorableTime' in c:NEWLINE del c['LatestRestorableTime']NEWLINENEWLINE if self.ec2_instance_filters == {}:NEWLINE matches_filter = TrueNEWLINE else:NEWLINE matches_filter = FalseNEWLINENEWLINE try:NEWLINE # arn:aws:rds:<region>:<account number>:<resourcetype>:<name>NEWLINE tags = client.list_tags_for_resource(NEWLINE ResourceName='arn:aws:rds:' + region + ':' + account_id + ':cluster:' + c['DBClusterIdentifier'])NEWLINE c['Tags'] = tags['TagList']NEWLINENEWLINE if self.ec2_instance_filters:NEWLINE for filter_key, filter_values in self.ec2_instance_filters.items():NEWLINE # get AWS tag key e.g. 
tag:env will be 'env'NEWLINE tag_name = filter_key.split(":", 1)[1]NEWLINE # Filter values is a list (if you put multiple values for the same tag name)NEWLINE matches_filter = any(d['Key'] == tag_name and d['Value'] in filter_values for d in c['Tags'])NEWLINENEWLINE if matches_filter:NEWLINE # it matches a filter, so stop looking for further matchesNEWLINE breakNEWLINENEWLINE except Exception as e:NEWLINE if e.message.find('DBInstanceNotFound') >= 0:NEWLINE # AWS RDS bug (2016-01-06) means deletion does not fully complete and leave an 'empty' cluster.NEWLINE # Ignore errors when trying to find tags for theseNEWLINE passNEWLINENEWLINE # ignore empty clusters caused by AWS bugNEWLINE if len(c['DBClusterMembers']) == 0:NEWLINE continueNEWLINE elif matches_filter:NEWLINE c_dict[c['DBClusterIdentifier']] = cNEWLINENEWLINE self.inventory['db_clusters'] = c_dictNEWLINENEWLINE def get_elasticache_clusters_by_region(self, region):NEWLINE ''' Makes an AWS API call to the list of ElastiCache clusters (withNEWLINE nodes' info) in a particular region.'''NEWLINENEWLINE # ElastiCache boto module doesn't provide a get_all_instances method,NEWLINE # that's why we need to call describe directly (it would be called byNEWLINE # the shorthand method anyway...)NEWLINE try:NEWLINE conn = self.connect_to_aws(elasticache, region)NEWLINE if conn:NEWLINE # show_cache_node_info = TrueNEWLINE # because we also want nodes' informationNEWLINE response = conn.describe_cache_clusters(None, None, None, True)NEWLINENEWLINE except boto.exception.BotoServerError as e:NEWLINE error = e.reasonNEWLINENEWLINE if e.error_code == 'AuthFailure':NEWLINE error = self.get_auth_error_message()NEWLINE elif e.error_code == "OptInRequired":NEWLINE error = "ElastiCache hasn't been enabled for this account yet. 
" \NEWLINE "You must either log in to the ElastiCache service through the AWS console to enable it, " \NEWLINE "or set 'elasticache = False' in ec2.ini"NEWLINE elif not e.reason == "Forbidden":NEWLINE error = "Looks like AWS ElastiCache is down:\n%s" % e.messageNEWLINE self.fail_with_error(error, 'getting ElastiCache clusters')NEWLINENEWLINE try:NEWLINE # Boto also doesn't provide wrapper classes to CacheClusters orNEWLINE # CacheNodes. Because of that we can't make use of the get_listNEWLINE # method in the AWSQueryConnection. Let's do the work manuallyNEWLINE clusters = response['DescribeCacheClustersResponse']['DescribeCacheClustersResult']['CacheClusters']NEWLINENEWLINE except KeyError as e:NEWLINE error = "ElastiCache query to AWS failed (unexpected format)."NEWLINE self.fail_with_error(error, 'getting ElastiCache clusters')NEWLINENEWLINE for cluster in clusters:NEWLINE self.add_elasticache_cluster(cluster, region)NEWLINENEWLINE def get_elasticache_replication_groups_by_region(self, region):NEWLINE ''' Makes an AWS API call to the list of ElastiCache replication groupsNEWLINE in a particular region.'''NEWLINENEWLINE # ElastiCache boto module doesn't provide a get_all_instances method,NEWLINE # that's why we need to call describe directly (it would be called byNEWLINE # the shorthand method anyway...)NEWLINE try:NEWLINE conn = self.connect_to_aws(elasticache, region)NEWLINE if conn:NEWLINE response = conn.describe_replication_groups()NEWLINENEWLINE except boto.exception.BotoServerError as e:NEWLINE error = e.reasonNEWLINENEWLINE if e.error_code == 'AuthFailure':NEWLINE error = self.get_auth_error_message()NEWLINE if not e.reason == "Forbidden":NEWLINE error = "Looks like AWS ElastiCache [Replication Groups] is down:\n%s" % e.messageNEWLINE self.fail_with_error(error, 'getting ElastiCache clusters')NEWLINENEWLINE try:NEWLINE # Boto also doesn't provide wrapper classes to ReplicationGroupsNEWLINE # Because of that we can't make use of the get_list method in 
theNEWLINE # AWSQueryConnection. Let's do the work manuallyNEWLINE replication_groups = response['DescribeReplicationGroupsResponse']['DescribeReplicationGroupsResult']['ReplicationGroups']NEWLINENEWLINE except KeyError as e:NEWLINE error = "ElastiCache [Replication Groups] query to AWS failed (unexpected format)."NEWLINE self.fail_with_error(error, 'getting ElastiCache clusters')NEWLINENEWLINE for replication_group in replication_groups:NEWLINE self.add_elasticache_replication_group(replication_group, region)NEWLINENEWLINE def get_auth_error_message(self):NEWLINE ''' create an informative error message if there is an issue authenticating'''NEWLINE errors = ["Authentication error retrieving ec2 inventory."]NEWLINE if None in [os.environ.get('AWS_ACCESS_KEY_ID'), os.environ.get('AWS_SECRET_ACCESS_KEY')]:NEWLINE errors.append(' - No AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY environment vars found')NEWLINE else:NEWLINE errors.append(' - AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment vars found but may not be correct')NEWLINENEWLINE boto_paths = ['/etc/boto.cfg', '~/.boto', '~/.aws/credentials']NEWLINE boto_config_found = list(p for p in boto_paths if os.path.isfile(os.path.expanduser(p)))NEWLINE if len(boto_config_found) > 0:NEWLINE errors.append(" - Boto configs found at '%s', but the credentials contained may not be correct" % ', '.join(boto_config_found))NEWLINE else:NEWLINE errors.append(" - No Boto config found at any expected location '%s'" % ', '.join(boto_paths))NEWLINENEWLINE return '\n'.join(errors)NEWLINENEWLINE def fail_with_error(self, err_msg, err_operation=None):NEWLINE '''log an error to std err for ansible-playbook to consume and exit'''NEWLINE if err_operation:NEWLINE err_msg = 'ERROR: "{err_msg}", while: {err_operation}'.format(NEWLINE err_msg=err_msg, err_operation=err_operation)NEWLINE sys.stderr.write(err_msg)NEWLINE sys.exit(1)NEWLINENEWLINE def get_instance(self, region, instance_id):NEWLINE conn = self.connect(region)NEWLINENEWLINE 
reservations = conn.get_all_instances([instance_id])NEWLINE for reservation in reservations:NEWLINE for instance in reservation.instances:NEWLINE return instanceNEWLINENEWLINE def add_instance(self, instance, region):NEWLINE ''' Adds an instance to the inventory and index, as long as it isNEWLINE addressable '''NEWLINENEWLINE # Only return instances with desired instance statesNEWLINE if instance.state not in self.ec2_instance_states:NEWLINE returnNEWLINENEWLINE # Select the best destination addressNEWLINE if self.destination_format and self.destination_format_tags:NEWLINE dest = self.destination_format.format(*[getattr(instance, 'tags').get(tag, '') for tag in self.destination_format_tags])NEWLINE elif instance.subnet_id:NEWLINE dest = getattr(instance, self.vpc_destination_variable, None)NEWLINE if dest is None:NEWLINE dest = getattr(instance, 'tags').get(self.vpc_destination_variable, None)NEWLINE else:NEWLINE dest = getattr(instance, self.destination_variable, None)NEWLINE if dest is None:NEWLINE dest = getattr(instance, 'tags').get(self.destination_variable, None)NEWLINENEWLINE if not dest:NEWLINE # Skip instances we cannot address (e.g. 
private VPC subnet)NEWLINE returnNEWLINENEWLINE # Set the inventory nameNEWLINE hostname = NoneNEWLINE if self.hostname_variable:NEWLINE if self.hostname_variable.startswith('tag_'):NEWLINE hostname = instance.tags.get(self.hostname_variable[4:], None)NEWLINE else:NEWLINE hostname = getattr(instance, self.hostname_variable)NEWLINENEWLINE # set the hostname from route53NEWLINE if self.route53_enabled and self.route53_hostnames:NEWLINE route53_names = self.get_instance_route53_names(instance)NEWLINE for name in route53_names:NEWLINE if name.endswith(self.route53_hostnames):NEWLINE hostname = nameNEWLINENEWLINE # If we can't get a nice hostname, use the destination addressNEWLINE if not hostname:NEWLINE hostname = destNEWLINE # to_safe strips hostname characters like dots, so don't strip route53 hostnamesNEWLINE elif self.route53_enabled and self.route53_hostnames and hostname.endswith(self.route53_hostnames):NEWLINE hostname = hostname.lower()NEWLINE else:NEWLINE hostname = self.to_safe(hostname).lower()NEWLINENEWLINE # if we only want to include hosts that match a pattern, skip those that don'tNEWLINE if self.pattern_include and not self.pattern_include.match(hostname):NEWLINE returnNEWLINENEWLINE # if we need to exclude hosts that match a pattern, skip thoseNEWLINE if self.pattern_exclude and self.pattern_exclude.match(hostname):NEWLINE returnNEWLINENEWLINE # Add to indexNEWLINE self.index[hostname] = [region, instance.id]NEWLINENEWLINE # Inventory: Group by instance ID (always a group of 1)NEWLINE if self.group_by_instance_id:NEWLINE self.inventory[instance.id] = [hostname]NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'instances', instance.id)NEWLINENEWLINE # Inventory: Group by regionNEWLINE if self.group_by_region:NEWLINE self.push(self.inventory, region, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'regions', region)NEWLINENEWLINE # Inventory: Group by availability zoneNEWLINE if 
self.group_by_availability_zone:NEWLINE self.push(self.inventory, instance.placement, hostname)NEWLINE if self.nested_groups:NEWLINE if self.group_by_region:NEWLINE self.push_group(self.inventory, region, instance.placement)NEWLINE self.push_group(self.inventory, 'zones', instance.placement)NEWLINENEWLINE # Inventory: Group by Amazon Machine Image (AMI) IDNEWLINE if self.group_by_ami_id:NEWLINE ami_id = self.to_safe(instance.image_id)NEWLINE self.push(self.inventory, ami_id, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'images', ami_id)NEWLINENEWLINE # Inventory: Group by instance typeNEWLINE if self.group_by_instance_type:NEWLINE type_name = self.to_safe('type_' + instance.instance_type)NEWLINE self.push(self.inventory, type_name, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'types', type_name)NEWLINENEWLINE # Inventory: Group by instance stateNEWLINE if self.group_by_instance_state:NEWLINE state_name = self.to_safe('instance_state_' + instance.state)NEWLINE self.push(self.inventory, state_name, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'instance_states', state_name)NEWLINENEWLINE # Inventory: Group by key pairNEWLINE if self.group_by_key_pair and instance.key_name:NEWLINE key_name = self.to_safe('key_' + instance.key_name)NEWLINE self.push(self.inventory, key_name, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'keys', key_name)NEWLINENEWLINE # Inventory: Group by VPCNEWLINE if self.group_by_vpc_id and instance.vpc_id:NEWLINE vpc_id_name = self.to_safe('vpc_id_' + instance.vpc_id)NEWLINE self.push(self.inventory, vpc_id_name, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'vpcs', vpc_id_name)NEWLINENEWLINE # Inventory: Group by security groupNEWLINE if self.group_by_security_group:NEWLINE try:NEWLINE for group in instance.groups:NEWLINE key = self.to_safe("security_group_" + group.name)NEWLINE 
self.push(self.inventory, key, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'security_groups', key)NEWLINE except AttributeError:NEWLINE self.fail_with_error('\n'.join(['Package boto seems a bit older.',NEWLINE 'Please upgrade boto >= 2.3.0.']))NEWLINENEWLINE # Inventory: Group by AWS account IDNEWLINE if self.group_by_aws_account:NEWLINE self.push(self.inventory, self.aws_account_id, dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'accounts', self.aws_account_id)NEWLINENEWLINE # Inventory: Group by tag keysNEWLINE if self.group_by_tag_keys:NEWLINE for k, v in instance.tags.items():NEWLINE if self.expand_csv_tags and v and ',' in v:NEWLINE values = map(lambda x: x.strip(), v.split(','))NEWLINE else:NEWLINE values = [v]NEWLINENEWLINE for v in values:NEWLINE if v:NEWLINE key = self.to_safe("tag_" + k + "=" + v)NEWLINE else:NEWLINE key = self.to_safe("tag_" + k)NEWLINE self.push(self.inventory, key, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))NEWLINE if v:NEWLINE self.push_group(self.inventory, self.to_safe("tag_" + k), key)NEWLINENEWLINE # Inventory: Group by Route53 domain names if enabledNEWLINE if self.route53_enabled and self.group_by_route53_names:NEWLINE route53_names = self.get_instance_route53_names(instance)NEWLINE for name in route53_names:NEWLINE self.push(self.inventory, name, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'route53', name)NEWLINENEWLINE # Global Tag: instances without tagsNEWLINE if self.group_by_tag_none and len(instance.tags) == 0:NEWLINE self.push(self.inventory, 'tag_none', hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'tags', 'tag_none')NEWLINENEWLINE # Global Tag: tag all EC2 instancesNEWLINE self.push(self.inventory, 'ec2', hostname)NEWLINENEWLINE self.inventory["_meta"]["hostvars"][hostname] = 
self.get_host_info_dict_from_instance(instance)NEWLINE self.inventory["_meta"]["hostvars"][hostname]['ansible_host'] = destNEWLINENEWLINE def add_rds_instance(self, instance, region):NEWLINE ''' Adds an RDS instance to the inventory and index, as long as it isNEWLINE addressable '''NEWLINENEWLINE # Only want available instances unless all_rds_instances is TrueNEWLINE if not self.all_rds_instances and instance.status != 'available':NEWLINE returnNEWLINENEWLINE # Select the best destination addressNEWLINE dest = instance.endpoint[0]NEWLINENEWLINE if not dest:NEWLINE # Skip instances we cannot address (e.g. private VPC subnet)NEWLINE returnNEWLINENEWLINE # Set the inventory nameNEWLINE hostname = NoneNEWLINE if self.hostname_variable:NEWLINE if self.hostname_variable.startswith('tag_'):NEWLINE hostname = instance.tags.get(self.hostname_variable[4:], None)NEWLINE else:NEWLINE hostname = getattr(instance, self.hostname_variable)NEWLINENEWLINE # If we can't get a nice hostname, use the destination addressNEWLINE if not hostname:NEWLINE hostname = destNEWLINENEWLINE hostname = self.to_safe(hostname).lower()NEWLINENEWLINE # Add to indexNEWLINE self.index[hostname] = [region, instance.id]NEWLINENEWLINE # Inventory: Group by instance ID (always a group of 1)NEWLINE if self.group_by_instance_id:NEWLINE self.inventory[instance.id] = [hostname]NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'instances', instance.id)NEWLINENEWLINE # Inventory: Group by regionNEWLINE if self.group_by_region:NEWLINE self.push(self.inventory, region, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'regions', region)NEWLINENEWLINE # Inventory: Group by availability zoneNEWLINE if self.group_by_availability_zone:NEWLINE self.push(self.inventory, instance.availability_zone, hostname)NEWLINE if self.nested_groups:NEWLINE if self.group_by_region:NEWLINE self.push_group(self.inventory, region, instance.availability_zone)NEWLINE 
self.push_group(self.inventory, 'zones', instance.availability_zone)NEWLINENEWLINE # Inventory: Group by instance typeNEWLINE if self.group_by_instance_type:NEWLINE type_name = self.to_safe('type_' + instance.instance_class)NEWLINE self.push(self.inventory, type_name, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'types', type_name)NEWLINENEWLINE # Inventory: Group by VPCNEWLINE if self.group_by_vpc_id and instance.subnet_group and instance.subnet_group.vpc_id:NEWLINE vpc_id_name = self.to_safe('vpc_id_' + instance.subnet_group.vpc_id)NEWLINE self.push(self.inventory, vpc_id_name, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'vpcs', vpc_id_name)NEWLINENEWLINE # Inventory: Group by security groupNEWLINE if self.group_by_security_group:NEWLINE try:NEWLINE if instance.security_group:NEWLINE key = self.to_safe("security_group_" + instance.security_group.name)NEWLINE self.push(self.inventory, key, hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'security_groups', key)NEWLINENEWLINE except AttributeError:NEWLINE self.fail_with_error('\n'.join(['Package boto seems a bit older.',NEWLINE 'Please upgrade boto >= 2.3.0.']))NEWLINENEWLINE # Inventory: Group by engineNEWLINE if self.group_by_rds_engine:NEWLINE self.push(self.inventory, self.to_safe("rds_" + instance.engine), hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'rds_engines', self.to_safe("rds_" + instance.engine))NEWLINENEWLINE # Inventory: Group by parameter groupNEWLINE if self.group_by_rds_parameter_group:NEWLINE self.push(self.inventory, self.to_safe("rds_parameter_group_" + instance.parameter_group.name), hostname)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'rds_parameter_groups', self.to_safe("rds_parameter_group_" + instance.parameter_group.name))NEWLINENEWLINE # Global Tag: all RDS instancesNEWLINE self.push(self.inventory, 'rds', hostname)NEWLINENEWLINE 
self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance)NEWLINE self.inventory["_meta"]["hostvars"][hostname]['ansible_host'] = destNEWLINENEWLINE def add_elasticache_cluster(self, cluster, region):NEWLINE ''' Adds an ElastiCache cluster to the inventory and index, as long asNEWLINE it's nodes are addressable '''NEWLINENEWLINE # Only want available clusters unless all_elasticache_clusters is TrueNEWLINE if not self.all_elasticache_clusters and cluster['CacheClusterStatus'] != 'available':NEWLINE returnNEWLINENEWLINE # Select the best destination addressNEWLINE if 'ConfigurationEndpoint' in cluster and cluster['ConfigurationEndpoint']:NEWLINE # Memcached clusterNEWLINE dest = cluster['ConfigurationEndpoint']['Address']NEWLINE is_redis = FalseNEWLINE else:NEWLINE # Redis sigle node clusterNEWLINE # Because all Redis clusters are single nodes, we'll merge theNEWLINE # info from the cluster with info about the nodeNEWLINE dest = cluster['CacheNodes'][0]['Endpoint']['Address']NEWLINE is_redis = TrueNEWLINENEWLINE if not dest:NEWLINE # Skip clusters we cannot address (e.g. 
private VPC subnet)NEWLINE returnNEWLINENEWLINE # Add to indexNEWLINE self.index[dest] = [region, cluster['CacheClusterId']]NEWLINENEWLINE # Inventory: Group by instance ID (always a group of 1)NEWLINE if self.group_by_instance_id:NEWLINE self.inventory[cluster['CacheClusterId']] = [dest]NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'instances', cluster['CacheClusterId'])NEWLINENEWLINE # Inventory: Group by regionNEWLINE if self.group_by_region and not is_redis:NEWLINE self.push(self.inventory, region, dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'regions', region)NEWLINENEWLINE # Inventory: Group by availability zoneNEWLINE if self.group_by_availability_zone and not is_redis:NEWLINE self.push(self.inventory, cluster['PreferredAvailabilityZone'], dest)NEWLINE if self.nested_groups:NEWLINE if self.group_by_region:NEWLINE self.push_group(self.inventory, region, cluster['PreferredAvailabilityZone'])NEWLINE self.push_group(self.inventory, 'zones', cluster['PreferredAvailabilityZone'])NEWLINENEWLINE # Inventory: Group by node typeNEWLINE if self.group_by_instance_type and not is_redis:NEWLINE type_name = self.to_safe('type_' + cluster['CacheNodeType'])NEWLINE self.push(self.inventory, type_name, dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'types', type_name)NEWLINENEWLINE # Inventory: Group by VPC (information not available in the currentNEWLINE # AWS API version for ElastiCache)NEWLINENEWLINE # Inventory: Group by security groupNEWLINE if self.group_by_security_group and not is_redis:NEWLINENEWLINE # Check for the existence of the 'SecurityGroups' key and also ifNEWLINE # this key has some value. 
When the cluster is not placed in a SGNEWLINE # the query can return None here and cause an error.NEWLINE if 'SecurityGroups' in cluster and cluster['SecurityGroups'] is not None:NEWLINE for security_group in cluster['SecurityGroups']:NEWLINE key = self.to_safe("security_group_" + security_group['SecurityGroupId'])NEWLINE self.push(self.inventory, key, dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'security_groups', key)NEWLINENEWLINE # Inventory: Group by engineNEWLINE if self.group_by_elasticache_engine and not is_redis:NEWLINE self.push(self.inventory, self.to_safe("elasticache_" + cluster['Engine']), dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'elasticache_engines', self.to_safe(cluster['Engine']))NEWLINENEWLINE # Inventory: Group by parameter groupNEWLINE if self.group_by_elasticache_parameter_group:NEWLINE self.push(self.inventory, self.to_safe("elasticache_parameter_group_" + cluster['CacheParameterGroup']['CacheParameterGroupName']), dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'elasticache_parameter_groups', self.to_safe(cluster['CacheParameterGroup']['CacheParameterGroupName']))NEWLINENEWLINE # Inventory: Group by replication groupNEWLINE if self.group_by_elasticache_replication_group and 'ReplicationGroupId' in cluster and cluster['ReplicationGroupId']:NEWLINE self.push(self.inventory, self.to_safe("elasticache_replication_group_" + cluster['ReplicationGroupId']), dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'elasticache_replication_groups', self.to_safe(cluster['ReplicationGroupId']))NEWLINENEWLINE # Global Tag: all ElastiCache clustersNEWLINE self.push(self.inventory, 'elasticache_clusters', cluster['CacheClusterId'])NEWLINENEWLINE host_info = self.get_host_info_dict_from_describe_dict(cluster)NEWLINENEWLINE self.inventory["_meta"]["hostvars"][dest] = host_infoNEWLINENEWLINE # Add the nodesNEWLINE for node in 
cluster['CacheNodes']:NEWLINE self.add_elasticache_node(node, cluster, region)NEWLINENEWLINE def add_elasticache_node(self, node, cluster, region):NEWLINE ''' Adds an ElastiCache node to the inventory and index, as long asNEWLINE it is addressable '''NEWLINENEWLINE # Only want available nodes unless all_elasticache_nodes is TrueNEWLINE if not self.all_elasticache_nodes and node['CacheNodeStatus'] != 'available':NEWLINE returnNEWLINENEWLINE # Select the best destination addressNEWLINE dest = node['Endpoint']['Address']NEWLINENEWLINE if not dest:NEWLINE # Skip nodes we cannot address (e.g. private VPC subnet)NEWLINE returnNEWLINENEWLINE node_id = self.to_safe(cluster['CacheClusterId'] + '_' + node['CacheNodeId'])NEWLINENEWLINE # Add to indexNEWLINE self.index[dest] = [region, node_id]NEWLINENEWLINE # Inventory: Group by node ID (always a group of 1)NEWLINE if self.group_by_instance_id:NEWLINE self.inventory[node_id] = [dest]NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'instances', node_id)NEWLINENEWLINE # Inventory: Group by regionNEWLINE if self.group_by_region:NEWLINE self.push(self.inventory, region, dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'regions', region)NEWLINENEWLINE # Inventory: Group by availability zoneNEWLINE if self.group_by_availability_zone:NEWLINE self.push(self.inventory, cluster['PreferredAvailabilityZone'], dest)NEWLINE if self.nested_groups:NEWLINE if self.group_by_region:NEWLINE self.push_group(self.inventory, region, cluster['PreferredAvailabilityZone'])NEWLINE self.push_group(self.inventory, 'zones', cluster['PreferredAvailabilityZone'])NEWLINENEWLINE # Inventory: Group by node typeNEWLINE if self.group_by_instance_type:NEWLINE type_name = self.to_safe('type_' + cluster['CacheNodeType'])NEWLINE self.push(self.inventory, type_name, dest)NEWLINE if self.nested_groups:NEWLINE self.push_group(self.inventory, 'types', type_name)NEWLINENEWLINE # Inventory: Group by VPC (information not 
        # Inventory: Group by VPC (information not available in the current
        # AWS API version for ElastiCache)
        # NOTE(review): chunk starts mid-method here — the lines below are the
        # tail of add_elasticache_cluster(); its `def` line precedes this chunk.

        # Inventory: Group by security group
        if self.group_by_security_group:

            # Check for the existence of the 'SecurityGroups' key and also if
            # this key has some value. When the cluster is not placed in a SG
            # the query can return None here and cause an error.
            if 'SecurityGroups' in cluster and cluster['SecurityGroups'] is not None:
                for security_group in cluster['SecurityGroups']:
                    key = self.to_safe("security_group_" + security_group['SecurityGroupId'])
                    self.push(self.inventory, key, dest)
                    if self.nested_groups:
                        self.push_group(self.inventory, 'security_groups', key)

        # Inventory: Group by engine
        if self.group_by_elasticache_engine:
            self.push(self.inventory, self.to_safe("elasticache_" + cluster['Engine']), dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'elasticache_engines', self.to_safe("elasticache_" + cluster['Engine']))

        # Inventory: Group by parameter group (done at cluster level)

        # Inventory: Group by replication group (done at cluster level)

        # Inventory: Group by ElastiCache Cluster
        if self.group_by_elasticache_cluster:
            self.push(self.inventory, self.to_safe("elasticache_cluster_" + cluster['CacheClusterId']), dest)

        # Global Tag: all ElastiCache nodes
        self.push(self.inventory, 'elasticache_nodes', dest)

        host_info = self.get_host_info_dict_from_describe_dict(node)

        # Merge so node-level vars don't clobber vars set by an earlier pass.
        if dest in self.inventory["_meta"]["hostvars"]:
            self.inventory["_meta"]["hostvars"][dest].update(host_info)
        else:
            self.inventory["_meta"]["hostvars"][dest] = host_info

    def add_elasticache_replication_group(self, replication_group, region):
        ''' Adds an ElastiCache replication group to the inventory and index '''

        # Only want available clusters unless all_elasticache_replication_groups is True
        if not self.all_elasticache_replication_groups and replication_group['Status'] != 'available':
            return

        # Skip clusters we cannot address (e.g. private VPC subnet or clustered redis)
        if replication_group['NodeGroups'][0]['PrimaryEndpoint'] is None or \
           replication_group['NodeGroups'][0]['PrimaryEndpoint']['Address'] is None:
            return

        # Select the best destination address (PrimaryEndpoint)
        dest = replication_group['NodeGroups'][0]['PrimaryEndpoint']['Address']

        # Add to index
        self.index[dest] = [region, replication_group['ReplicationGroupId']]

        # Inventory: Group by ID (always a group of 1)
        if self.group_by_instance_id:
            self.inventory[replication_group['ReplicationGroupId']] = [dest]
            if self.nested_groups:
                self.push_group(self.inventory, 'instances', replication_group['ReplicationGroupId'])

        # Inventory: Group by region
        if self.group_by_region:
            self.push(self.inventory, region, dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'regions', region)

        # Inventory: Group by availability zone (doesn't apply to replication groups)

        # Inventory: Group by node type (doesn't apply to replication groups)

        # Inventory: Group by VPC (information not available in the current
        # AWS API version for replication groups

        # Inventory: Group by security group (doesn't apply to replication groups)
        # Check this value in cluster level

        # Inventory: Group by engine (replication groups are always Redis)
        if self.group_by_elasticache_engine:
            self.push(self.inventory, 'elasticache_redis', dest)
            if self.nested_groups:
                self.push_group(self.inventory, 'elasticache_engines', 'redis')

        # Global Tag: all ElastiCache clusters
        self.push(self.inventory, 'elasticache_replication_groups', replication_group['ReplicationGroupId'])

        host_info = self.get_host_info_dict_from_describe_dict(replication_group)

        self.inventory["_meta"]["hostvars"][dest] = host_info

    def get_route53_records(self):
        ''' Get and store the map of resource records to domain names that
        point to them. '''

        if self.boto_profile:
            r53_conn = route53.Route53Connection(profile_name=self.boto_profile)
        else:
            r53_conn = route53.Route53Connection()
        all_zones = r53_conn.get_zones()

        route53_zones = [zone for zone in all_zones if zone.name[:-1] not in self.route53_excluded_zones]

        self.route53_records = {}

        for zone in route53_zones:
            rrsets = r53_conn.get_all_rrsets(zone.id)

            for record_set in rrsets:
                record_name = record_set.name

                # Zone record names come back with a trailing dot; strip it.
                if record_name.endswith('.'):
                    record_name = record_name[:-1]

                for resource in record_set.resource_records:
                    self.route53_records.setdefault(resource, set())
                    self.route53_records[resource].add(record_name)

    def get_instance_route53_names(self, instance):
        ''' Check if an instance is referenced in the records we have from
        Route53. If it is, return the list of domain names pointing to said
        instance. If nothing points to it, return an empty list. '''

        instance_attributes = ['public_dns_name', 'private_dns_name',
                               'ip_address', 'private_ip_address']

        name_list = set()

        for attrib in instance_attributes:
            try:
                value = getattr(instance, attrib)
            except AttributeError:
                continue

            if value in self.route53_records:
                name_list.update(self.route53_records[value])

        return list(name_list)

    def get_host_info_dict_from_instance(self, instance):
        ''' Flatten a boto EC2 instance object into a dict of 'ec2_*'
        host variables suitable for Ansible hostvars. '''
        instance_vars = {}
        for key in vars(instance):
            value = getattr(instance, key)
            key = self.to_safe('ec2_' + key)

            # Handle complex types
            # state/previous_state changed to properties in boto in https://github.com/boto/boto/commit/a23c379837f698212252720d2af8dec0325c9518
            if key == 'ec2__state':
                instance_vars['ec2_state'] = instance.state or ''
                instance_vars['ec2_state_code'] = instance.state_code
            elif key == 'ec2__previous_state':
                instance_vars['ec2_previous_state'] = instance.previous_state or ''
                instance_vars['ec2_previous_state_code'] = instance.previous_state_code
            elif isinstance(value, (int, bool)):
                instance_vars[key] = value
            elif isinstance(value, six.string_types):
                instance_vars[key] = value.strip()
            elif value is None:
                instance_vars[key] = ''
            elif key == 'ec2_region':
                instance_vars[key] = value.name
            elif key == 'ec2__placement':
                instance_vars['ec2_placement'] = value.zone
            elif key == 'ec2_tags':
                for k, v in value.items():
                    # Optionally explode comma-separated tag values into lists.
                    if self.expand_csv_tags and ',' in v:
                        v = list(map(lambda x: x.strip(), v.split(',')))
                    key = self.to_safe('ec2_tag_' + k)
                    instance_vars[key] = v
            elif key == 'ec2_groups':
                group_ids = []
                group_names = []
                for group in value:
                    group_ids.append(group.id)
                    group_names.append(group.name)
                instance_vars["ec2_security_group_ids"] = ','.join([str(i) for i in group_ids])
                instance_vars["ec2_security_group_names"] = ','.join([str(i) for i in group_names])
            elif key == 'ec2_block_device_mapping':
                instance_vars["ec2_block_devices"] = {}
                for k, v in value.items():
                    instance_vars["ec2_block_devices"][os.path.basename(k)] = v.volume_id
            else:
                # Remaining complex types are intentionally dropped.
                pass
                # TODO Product codes if someone finds them useful
                # print key
                # print type(value)
                # print value

        instance_vars[self.to_safe('ec2_account_id')] = self.aws_account_id

        return instance_vars

    def get_host_info_dict_from_describe_dict(self, describe_dict):
        ''' Parses the dictionary returned by the API call into a flat list
        of parameters. This method should be used only when 'describe' is
        used directly because Boto doesn't provide specific classes. '''

        # I really don't agree with prefixing everything with 'ec2'
        # because EC2, RDS and ElastiCache are different services.
        # I'm just following the pattern used until now to not break any
        # compatibility.

        host_info = {}
        for key in describe_dict:
            value = describe_dict[key]
            key = self.to_safe('ec2_' + self.uncammelize(key))

            # Handle complex types

            # Target: Memcached Cache Clusters
            if key == 'ec2_configuration_endpoint' and value:
                host_info['ec2_configuration_endpoint_address'] = value['Address']
                host_info['ec2_configuration_endpoint_port'] = value['Port']

            # Target: Cache Nodes and Redis Cache Clusters (single node)
            if key == 'ec2_endpoint' and value:
                host_info['ec2_endpoint_address'] = value['Address']
                host_info['ec2_endpoint_port'] = value['Port']

            # Target: Redis Replication Groups
            if key == 'ec2_node_groups' and value:
                host_info['ec2_endpoint_address'] = value[0]['PrimaryEndpoint']['Address']
                host_info['ec2_endpoint_port'] = value[0]['PrimaryEndpoint']['Port']
                replica_count = 0
                for node in value[0]['NodeGroupMembers']:
                    if node['CurrentRole'] == 'primary':
                        host_info['ec2_primary_cluster_address'] = node['ReadEndpoint']['Address']
                        host_info['ec2_primary_cluster_port'] = node['ReadEndpoint']['Port']
                        host_info['ec2_primary_cluster_id'] = node['CacheClusterId']
                    elif node['CurrentRole'] == 'replica':
                        # Replicas are numbered in API order: ec2_replica_cluster_*_N
                        host_info['ec2_replica_cluster_address_' + str(replica_count)] = node['ReadEndpoint']['Address']
                        host_info['ec2_replica_cluster_port_' + str(replica_count)] = node['ReadEndpoint']['Port']
                        host_info['ec2_replica_cluster_id_' + str(replica_count)] = node['CacheClusterId']
                        replica_count += 1

            # Target: Redis Replication Groups
            if key == 'ec2_member_clusters' and value:
                host_info['ec2_member_clusters'] = ','.join([str(i) for i in value])

            # Target: All Cache Clusters
            elif key == 'ec2_cache_parameter_group':
                host_info["ec2_cache_node_ids_to_reboot"] = ','.join([str(i) for i in value['CacheNodeIdsToReboot']])
                host_info['ec2_cache_parameter_group_name'] = value['CacheParameterGroupName']
                host_info['ec2_cache_parameter_apply_status'] = value['ParameterApplyStatus']

            # Target: Almost everything
            elif key == 'ec2_security_groups':

                # Skip if SecurityGroups is None
                # (it is possible to have the key defined but no value in it).
                if value is not None:
                    sg_ids = []
                    for sg in value:
                        sg_ids.append(sg['SecurityGroupId'])
                    host_info["ec2_security_group_ids"] = ','.join([str(i) for i in sg_ids])

            # Target: Everything
            # Preserve booleans and integers
            elif isinstance(value, (int, bool)):
                host_info[key] = value

            # Target: Everything
            # Sanitize string values
            elif isinstance(value, six.string_types):
                host_info[key] = value.strip()

            # Target: Everything
            # Replace None by an empty string
            elif value is None:
                host_info[key] = ''

            else:
                # Remove non-processed complex types
                pass

        return host_info

    def get_host_info(self):
        ''' Get variables about a specific host '''

        if len(self.index) == 0:
            # Need to load index from cache
            self.load_index_from_cache()

        if self.args.host not in self.index:
            # try updating the cache
            self.do_api_calls_update_cache()
            if self.args.host not in self.index:
                # host might not exist anymore
                return self.json_format_dict({}, True)

        (region, instance_id) = self.index[self.args.host]

        instance = self.get_instance(region, instance_id)
        return self.json_format_dict(self.get_host_info_dict_from_instance(instance), True)

    def push(self, my_dict, key, element):
        ''' Push an element onto an array that may not have been defined in
        the dict '''
        group_info = my_dict.setdefault(key, [])
        if isinstance(group_info, dict):
            # Group already promoted to {'hosts': [...], 'children': [...]} form.
            host_list = group_info.setdefault('hosts', [])
            host_list.append(element)
        else:
            group_info.append(element)

    def push_group(self, my_dict, key, element):
        ''' Push a group as a child of another group. '''
        parent_group = my_dict.setdefault(key, {})
        if not isinstance(parent_group, dict):
            # Promote a plain host list into the dict form so it can hold children.
            parent_group = my_dict[key] = {'hosts': parent_group}
        child_groups = parent_group.setdefault('children', [])
        if element not in child_groups:
            child_groups.append(element)

    def get_inventory_from_cache(self):
        ''' Reads the inventory from the cache file and returns it as a JSON
        object '''

        with open(self.cache_path_cache, 'r') as f:
            json_inventory = f.read()
        return json_inventory

    def load_index_from_cache(self):
        ''' Reads the index from the cache file sets self.index '''

        with open(self.cache_path_index, 'rb') as f:
            self.index = json.load(f)

    def write_to_cache(self, data, filename):
        ''' Writes data in JSON format to a file '''

        json_data = self.json_format_dict(data, True)
        with open(filename, 'w') as f:
            f.write(json_data)

    def uncammelize(self, key):
        # CamelCase -> snake_case (e.g. 'CacheClusterId' -> 'cache_cluster_id')
        temp = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', key)
        return re.sub('([a-z0-9])([A-Z])', r'\1_\2', temp).lower()

    def to_safe(self, word):
        ''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups '''
        # NOTE(review): '\_' / '\-' are non-raw escapes; harmless inside a
        # character class, but raw strings would silence SyntaxWarning on 3.12+.
        regex = "[^A-Za-z0-9\_"
        if not self.replace_dash_in_groups:
            regex += "\-"
        return re.sub(regex + "]", "_", word)

    def json_format_dict(self, data, pretty=False):
        ''' Converts a dict to a JSON object and dumps it as a formatted
        string '''

        if pretty:
            return json.dumps(data, sort_keys=True, indent=2)
        else:
            return json.dumps(data)


if __name__ == '__main__':
    # Run the script
    Ec2Inventory()
# -*- coding: utf-8 -*-
"""
    'accounts' resource and schema settings.

    :copyright: (c) 2014 by Nicola Iarocci and CIR2000.
    :license: BSD, see LICENSE for more details.
"""
from common import base_schema, required_string

# Cerberus-style field rules. Single-letter keys keep request/response
# payloads compact.
_schema = {
    'u': required_string,   # username
    'p': required_string,   # password
    't': required_string,   # token
    'r': {                  # role(s) granted to the account
        'type': 'list',
        'allowed': ['admin', 'app', 'user'],
        'required': True,
    }
}

# Eve endpoint definition for the 'accounts' resource.
definition = {
    'url': 'accounts',
    'item_title': 'account',
    # only admins and apps are allowed to consume this endpoint.
    'allowed_roles': ['admin', 'app'],
    # disable caching so account/credential data is never served stale.
    'cache_control': '',
    'cache_expires': 0,
    'additional_lookup': {
        # Raw string: the original non-raw '[\w]+' relied on Python keeping
        # the unrecognized '\w' escape literally, which emits
        # DeprecationWarning/SyntaxWarning on modern Python. Value unchanged.
        'url': r'regex("[\w]+")',  # username, expected to be unique
        'field': 'u'
    },
    'schema': _schema,
}
definition.update(base_schema)
# Copyright 2018 Capital One Services, LLCNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEfrom c7n_azure.provider import resourcesNEWLINEfrom c7n_azure.resources.arm import ArmResourceManagerNEWLINENEWLINENEWLINE@resources.register('vnet')NEWLINEclass Vnet(ArmResourceManager):NEWLINENEWLINE class resource_type(ArmResourceManager.resource_type):NEWLINE service = 'azure.mgmt.network'NEWLINE client = 'NetworkManagementClient'NEWLINE enum_spec = ('virtual_networks', 'list_all', None)NEWLINE resource_type = 'Microsoft.Network/virtualNetworks'NEWLINE
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import time
import threading
import os
import traceback
import json
import shutil
import weakref
import csv
from decimal import Decimal
import base64
from functools import partial
import queue
import asyncio
from typing import Optional

from PyQt5.QtGui import QPixmap, QKeySequence, QIcon, QCursor
from PyQt5.QtCore import Qt, QRect, QStringListModel, QSize, pyqtSignal
from PyQt5.QtWidgets import (QMessageBox, QComboBox, QSystemTrayIcon, QTabWidget,
                             QSpinBox, QMenuBar, QFileDialog, QCheckBox, QLabel,
                             QVBoxLayout, QGridLayout, QLineEdit, QTreeWidgetItem,
                             QHBoxLayout, QPushButton, QScrollArea, QTextEdit,
                             QShortcut, QMainWindow, QCompleter, QInputDialog,
                             QWidget, QMenu, QSizePolicy, QStatusBar)

import electrum
from electrum import (keystore, simple_config, ecc, constants, util, bitcoin, commands,
                      coinchooser, paymentrequest)
from electrum.bitcoin import COIN, is_address, TYPE_ADDRESS
from electrum.plugin import run_hook
from electrum.i18n import _
from electrum.util import (format_time, format_satoshis, format_fee_satoshis,
                           format_satoshis_plain, NotEnoughFunds,
                           UserCancelled, NoDynamicFeeEstimates, profiler,
                           export_meta, import_meta, bh2u, bfh, InvalidPassword,
                           base_units, base_units_list, base_unit_name_to_decimal_point,
                           decimal_point_to_base_unit_name, quantize_feerate,
                           UnknownBaseUnit, DECIMAL_POINT_DEFAULT, UserFacingException,
                           get_new_wallet_name, send_exception_to_crash_reporter,
                           InvalidBitcoinURI)
from electrum.transaction import Transaction, TxOutput
from electrum.address_synchronizer import AddTransactionException
from electrum.wallet import (Multisig_Wallet, CannotBumpFee, Abstract_Wallet,
                             sweep_preparations, InternalAddressCorruption)
from electrum.version import ELECTRUM_VERSION
from electrum.network import Network, TxBroadcastError, BestEffortRequestFailed
from electrum.exchange_rate import FxThread
from electrum.simple_config import SimpleConfig
from electrum.logging import Logger
from electrum.paymentrequest import PR_PAID

from .exception_window import Exception_Hook
from .amountedit import AmountEdit, BTCAmountEdit, MyLineEdit, FeerateEdit
from .qrcodewidget import QRCodeWidget, QRDialog
from .qrtextedit import ShowQRTextEdit, ScanQRTextEdit
from .transaction_dialog import show_transaction
from .fee_slider import FeeSlider
from .util import (read_QIcon, ColorScheme, text_dialog, icon_path, WaitingDialog,
                   WindowModalDialog, ChoicesLayout, HelpLabel, FromList, Buttons,
                   OkButton, InfoButton, WWLabel, TaskThread, CancelButton,
                   CloseButton, HelpButton, MessageBoxMixin, EnterButton, expiration_values,
                   ButtonsLineEdit, CopyCloseButton, import_meta_gui, export_meta_gui,
                   filename_field, address_field, char_width_in_lineedit, webopen)
from .installwizard import WIF_HELP_TEXT
from .history_list import HistoryList, HistoryModel
from .update_checker import UpdateCheck, UpdateCheckThread


class StatusBarButton(QPushButton):
    # Small flat icon button used in the window's status bar.
    def __init__(self, icon, tooltip, func):
        QPushButton.__init__(self, icon, '')
        self.setToolTip(tooltip)
        self.setFlat(True)
        self.setMaximumWidth(25)
        self.clicked.connect(self.onPress)
        self.func = func
        self.setIconSize(QSize(25,25))
        self.setCursor(QCursor(Qt.PointingHandCursor))

    def onPress(self, checked=False):
        '''Drops the unwanted PyQt5 "checked" argument'''
        self.func()

    def keyPressEvent(self, e):
        # Make Return trigger the button like a click.
        if e.key() == Qt.Key_Return:
            self.func()


class ElectrumWindow(QMainWindow, MessageBoxMixin, Logger):
    # Qt signals used to marshal work from network/worker threads back
    # onto the GUI thread.
    payment_request_ok_signal = pyqtSignal()
    payment_request_error_signal = pyqtSignal()
    new_fx_quotes_signal = pyqtSignal()
    new_fx_history_signal = pyqtSignal()
    network_signal = pyqtSignal(str, object)
    alias_received_signal = pyqtSignal()
    computing_privkeys_signal = pyqtSignal()
    show_privkeys_signal = pyqtSignal()

    def __init__(self, gui_object, wallet: Abstract_Wallet):
        QMainWindow.__init__(self)

        self.gui_object = gui_object
        self.config = config = gui_object.config  # type: SimpleConfig
        self.gui_thread = gui_object.gui_thread

        self.setup_exception_hook()

        self.network = gui_object.daemon.network  # type: Network
        assert wallet, "no wallet"
        self.wallet = wallet
        self.fx = gui_object.daemon.fx  # type: FxThread
        self.invoices = wallet.invoices
        self.contacts = wallet.contacts
        self.tray = gui_object.tray
        self.app = gui_object.app
        self.cleaned_up = False
        self.payment_request = None  # type: Optional[paymentrequest.PaymentRequest]
        self.checking_accounts = False
        self.qr_window = None
        self.not_enough_funds = False
        self.pluginsdialog = None
        self.require_fee_update = False
        self.tl_windows = []
        self.tx_external_keypairs = {}
        Logger.__init__(self)

        self.tx_notification_queue = queue.Queue()
        self.tx_notification_last_time = 0

        self.create_status_bar()
        self.need_update = threading.Event()

        self.decimal_point = config.get('decimal_point', DECIMAL_POINT_DEFAULT)
        try:
            # Validate the configured base unit; fall back to default if unknown.
            decimal_point_to_base_unit_name(self.decimal_point)
        except UnknownBaseUnit:
            self.decimal_point = DECIMAL_POINT_DEFAULT
        self.num_zeros = int(config.get('num_zeros', 0))

        self.completions = QStringListModel()

        self.tabs = tabs = QTabWidget(self)
        self.send_tab = self.create_send_tab()
        self.receive_tab = self.create_receive_tab()
        self.addresses_tab = self.create_addresses_tab()
        self.utxo_tab = self.create_utxo_tab()
        self.console_tab = self.create_console_tab()
        self.contacts_tab = self.create_contacts_tab()
        tabs.addTab(self.create_history_tab(), read_QIcon("tab_history.png"), _('History'))
        tabs.addTab(self.send_tab, read_QIcon("tab_send.png"), _('Send'))
        tabs.addTab(self.receive_tab, read_QIcon("tab_receive.png"), _('Receive'))

        def add_optional_tab(tabs, tab, icon, description, name):
            # Tabs the user can toggle from the View menu; shown only when the
            # matching 'show_<name>_tab' config flag is set.
            tab.tab_icon = icon
            tab.tab_description = description
            tab.tab_pos = len(tabs)
            tab.tab_name = name
            if self.config.get('show_{}_tab'.format(name), False):
                tabs.addTab(tab, icon, description.replace("&", ""))

        add_optional_tab(tabs, self.addresses_tab, read_QIcon("tab_addresses.png"), _("&Addresses"), "addresses")
        add_optional_tab(tabs, self.utxo_tab, read_QIcon("tab_coins.png"), _("Co&ins"), "utxo")
        add_optional_tab(tabs, self.contacts_tab, read_QIcon("tab_contacts.png"), _("Con&tacts"), "contacts")
        add_optional_tab(tabs, self.console_tab, read_QIcon("tab_console.png"), _("Con&sole"), "console")

        tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
        self.setCentralWidget(tabs)

        if self.config.get("is_maximized"):
            self.showMaximized()

        self.setWindowIcon(read_QIcon("electrum.png"))
        self.init_menubar()

        # weakref.proxy so the shortcut lambdas don't keep the tabs alive.
        wrtabs = weakref.proxy(tabs)
        QShortcut(QKeySequence("Ctrl+W"), self, self.close)
        QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
        QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
        QShortcut(QKeySequence("F5"), self, self.update_wallet)
        QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() - 1)%wrtabs.count()))
        QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: wrtabs.setCurrentIndex((wrtabs.currentIndex() + 1)%wrtabs.count()))

        for i in range(wrtabs.count()):
            # i=i default binds the loop variable (late-binding closure fix).
            QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: wrtabs.setCurrentIndex(i))

        self.payment_request_ok_signal.connect(self.payment_request_ok)
        self.payment_request_error_signal.connect(self.payment_request_error)
        self.history_list.setFocus(True)

        # network callbacks
        if self.network:
            self.network_signal.connect(self.on_network_qt)
            interests = ['wallet_updated', 'network_updated', 'blockchain_updated',
                         'new_transaction', 'status',
                         'banner', 'verified', 'fee', 'fee_histogram']
            # To avoid leaking references to "self" that prevent the
            # window from being GC-ed when closed, callbacks should be
            # methods of this class only, and specifically not be
            # partials, lambdas or methods of subobjects.  Hence...
            self.network.register_callback(self.on_network, interests)
            # set initial message
            self.console.showMessage(self.network.banner)
            self.network.register_callback(self.on_quotes, ['on_quotes'])
            self.network.register_callback(self.on_history, ['on_history'])
            self.new_fx_quotes_signal.connect(self.on_fx_quotes)
            self.new_fx_history_signal.connect(self.on_fx_history)

        # update fee slider in case we missed the callback
        self.fee_slider.update()
        self.load_wallet(wallet)
        gui_object.timer.timeout.connect(self.timer_actions)
        self.fetch_alias()

        # If the option hasn't been set yet
        if config.get('check_updates') is None:
            choice = self.question(title="Electrum - " + _("Enable update check"),
                                   msg=_("For security reasons we advise that you always use the latest version of Electrum.") + " " +
                                       _("Would you like to be notified when there is a newer version of Electrum available?"))
            config.set_key('check_updates', bool(choice), save=True)

        if config.get('check_updates', False):
            # The references to both the thread and the window need to be stored somewhere
            # to prevent GC from getting in our way.
            def on_version_received(v):
                if UpdateCheck.is_newer(v):
                    self.update_check_button.setText(_("Update to Electrum {} is available").format(v))
                    self.update_check_button.clicked.connect(lambda: self.show_update_check(v))
                    self.update_check_button.show()
            self._update_check_thread = UpdateCheckThread(self)
            self._update_check_thread.checked.connect(on_version_received)
            self._update_check_thread.start()

    def on_history(self, b):
        # Network-thread callback: invalidate price cache, then hop to GUI thread.
        self.wallet.clear_coin_price_cache()
        self.new_fx_history_signal.emit()

    def setup_exception_hook(self):
        Exception_Hook(self)

    def on_fx_history(self):
        self.history_model.refresh('fx_history')
        self.address_list.update()

    def on_quotes(self, b):
        # Network-thread callback; marshal to GUI thread via signal.
        self.new_fx_quotes_signal.emit()

    def on_fx_quotes(self):
        self.update_status()
        # Refresh edits with the new rate
        edit = self.fiat_send_e if self.fiat_send_e.is_last_edited else self.amount_e
        edit.textEdited.emit(edit.text())
        edit = self.fiat_receive_e if self.fiat_receive_e.is_last_edited else self.receive_amount_e
        edit.textEdited.emit(edit.text())
        # History tab needs updating if it used spot
        if self.fx.history_used_spot:
            self.history_model.refresh('fx_quotes')
            self.address_list.update()

    def toggle_tab(self, tab):
        # Show/hide an optional tab and persist the choice in config.
        show = not self.config.get('show_{}_tab'.format(tab.tab_name), False)
        self.config.set_key('show_{}_tab'.format(tab.tab_name), show)
        item_text = (_("Hide {}") if show else _("Show {}")).format(tab.tab_description)
        tab.menu_action.setText(item_text)
        if show:
            # Find out where to place the tab
            index = len(self.tabs)
            for i in range(len(self.tabs)):
                try:
                    if tab.tab_pos < self.tabs.widget(i).tab_pos:
                        index = i
                        break
                except AttributeError:
                    pass
            self.tabs.insertTab(index, tab, tab.tab_icon, tab.tab_description.replace("&", ""))
        else:
            i = self.tabs.indexOf(tab)
            self.tabs.removeTab(i)

    def push_top_level_window(self, window):
        '''Used for e.g. tx dialog box to ensure new dialogs are appropriately
        parented.  This used to be done by explicitly providing the parent
        window, but that isn't something hardware wallet prompts know.'''
        self.tl_windows.append(window)

    def pop_top_level_window(self, window):
        self.tl_windows.remove(window)

    def top_level_window(self, test_func=None):
        '''Do the right thing in the presence of tx dialog windows'''
        override = self.tl_windows[-1] if self.tl_windows else None
        if override and test_func and not test_func(override):
            override = None  # only override if ok for test_func
        return self.top_level_window_recurse(override, test_func)

    def diagnostic_name(self):
        #return '{}:{}'.format(self.__class__.__name__, self.wallet.diagnostic_name())
        return self.wallet.diagnostic_name()

    def is_hidden(self):
        return self.isMinimized() or self.isHidden()

    def show_or_hide(self):
        if self.is_hidden():
            self.bring_to_top()
        else:
            self.hide()

    def bring_to_top(self):
        self.show()
        self.raise_()

    def on_error(self, exc_info):
        # Central error handler: user cancellations are silent, user-facing
        # exceptions show their message, anything else is logged + shown.
        e = exc_info[1]
        if isinstance(e, UserCancelled):
            pass
        elif isinstance(e, UserFacingException):
            self.show_error(str(e))
        else:
            try:
                self.logger.error("on_error", exc_info=exc_info)
            except OSError:
                pass  # see #4418
            self.show_error(repr(e))

    def on_network(self, event, *args):
        # Runs on the network thread; GUI work is forwarded via network_signal.
        if event == 'wallet_updated':
            wallet = args[0]
            if wallet == self.wallet:
                self.need_update.set()
        elif event == 'network_updated':
            self.gui_object.network_updated_signal_obj.network_updated_signal \
                .emit(event, args)
            self.network_signal.emit('status', None)
        elif event == 'blockchain_updated':
            # to update number of confirmations in history
            self.need_update.set()
        elif event == 'new_transaction':
            wallet, tx = args
            if wallet == self.wallet:
                self.tx_notification_queue.put(tx)
        elif event in ['status', 'banner', 'verified', 'fee', 'fee_histogram']:
            # Handle in GUI thread
            self.network_signal.emit(event, args)
        else:
            self.logger.info(f"unexpected network message: {event} {args}")

    def on_network_qt(self, event, args=None):
        # Handle a network message in the GUI thread
        if event == 'status':
            self.update_status()
        elif event == 'banner':
            self.console.showMessage(args[0])
        elif event == 'verified':
            wallet, tx_hash, tx_mined_status = args
            if wallet == self.wallet:
                self.history_model.update_tx_mined_status(tx_hash, tx_mined_status)
        elif event == 'fee':
            if self.config.is_dynfee():
                self.fee_slider.update()
                self.require_fee_update = True
        elif event == 'fee_histogram':
            if self.config.is_dynfee():
                self.fee_slider.update()
                self.require_fee_update = True
            self.history_model.on_fee_histogram()
        else:
            self.logger.info(f"unexpected network_qt signal: {event} {args}")

    def fetch_alias(self):
        # Resolve the configured OpenAlias in a daemon thread; result is
        # announced via alias_received_signal.
        self.alias_info = None
        alias = self.config.get('alias')
        if alias:
            alias = str(alias)
            def f():
                self.alias_info = self.contacts.resolve_openalias(alias)
                self.alias_received_signal.emit()
            t = threading.Thread(target=f)
            t.setDaemon(True)
            t.start()

    def close_wallet(self):
        if self.wallet:
            self.logger.info(f'close_wallet {self.wallet.storage.path}')
        run_hook('close_wallet', self.wallet)

    @profiler
    def load_wallet(self, wallet):
        wallet.thread = TaskThread(self, self.on_error)
        self.update_recently_visited(wallet.storage.path)
        self.need_update.set()
        # Once GUI has been initialized check if we want to announce something since the callback has been called before the GUI was initialized
        # update menus
        self.seed_menu.setEnabled(self.wallet.has_seed())
        self.update_lock_icon()
        self.update_buttons_on_seed()
        self.update_console()
        self.clear_receive_tab()
        self.request_list.update()
        self.tabs.show()
        self.init_geometry()
        if self.config.get('hide_gui') and self.gui_object.tray.isVisible():
            self.hide()
        else:
            self.show()
        self.watching_only_changed()
        run_hook('load_wallet', wallet, self)
        try:
            wallet.try_detecting_internal_addresses_corruption()
        except InternalAddressCorruption as e:
            self.show_error(str(e))
            send_exception_to_crash_reporter(e)

    def init_geometry(self):
        winpos = self.wallet.storage.get("winpos-qt")
        try:
            # Only restore the saved geometry if it still fits on screen.
            screen = self.app.desktop().screenGeometry()
            assert screen.contains(QRect(*winpos))
            self.setGeometry(*winpos)
        except:
            self.logger.info("using default geometry")
            self.setGeometry(100, 100, 840, 400)

    def watching_only_changed(self):
        name = "Electrum Testnet" if constants.net.TESTNET else "Electrum"
        title = '%s %s - %s' % (name, ELECTRUM_VERSION,
                                self.wallet.basename())
        extra = [self.wallet.storage.get('wallet_type', '?')]
        if self.wallet.is_watching_only():
            extra.append(_('watching only'))
        title += ' [%s]'% ', '.join(extra)
        self.setWindowTitle(title)
        self.password_menu.setEnabled(self.wallet.may_have_password())
        self.import_privkey_menu.setVisible(self.wallet.can_import_privkey())
        self.import_address_menu.setVisible(self.wallet.can_import_address())
        self.export_menu.setEnabled(self.wallet.can_export())

    def warn_if_watching_only(self):
        if self.wallet.is_watching_only():
            msg = ' '.join([
                _("This wallet is watching-only."),
                _("This means you will not be able to spend Bitcoins with it."),
                _("Make sure you own the seed phrase or the private keys, before you request Bitcoins to be sent to this wallet.")
            ])
            self.show_warning(msg, title=_('Watch-only wallet'))

    def warn_if_testnet(self):
        if not constants.net.TESTNET:
            return
        # user might have opted out already
        if self.config.get('dont_show_testnet_warning', False):
            return
        # only show once per process lifecycle
        if getattr(self.gui_object, '_warned_testnet', False):
            return
        self.gui_object._warned_testnet = True
        msg = ''.join([
            _("You are in testnet mode."), ' ',
            _("Testnet coins are worthless."), '\n',
            _("Testnet is separate from the main Bitcoin network. It is used for testing.")
        ])
        cb = QCheckBox(_("Don't show this again."))
        cb_checked = False
        def on_cb(x):
            nonlocal cb_checked
            cb_checked = x == Qt.Checked
        cb.stateChanged.connect(on_cb)
        self.show_warning(msg, title=_('Testnet'), checkbox=cb)
        if cb_checked:
            self.config.set_key('dont_show_testnet_warning', True)

    def open_wallet(self):
        try:
            wallet_folder = self.get_wallet_folder()
        except FileNotFoundError as e:
            self.show_error(str(e))
            return
        filename, __ = QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder)
        if not filename:
            return
        self.gui_object.new_window(filename)


    # NOTE(review): the source chunk is truncated below, inside backup_wallet's
    # show_message string literal; the remainder lies outside this chunk.
    def backup_wallet(self):
        path = self.wallet.storage.path
        wallet_folder = os.path.dirname(path)
        filename, __ = QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder)
        if not filename:
            return
        new_path = os.path.join(wallet_folder, filename)
        if new_path != path:
            try:
                shutil.copy2(path, new_path)
                self.show_message(_("A copy of your wallet file was created
in")+" '%s'" % str(new_path), title=_("Wallet backup created"))NEWLINE except BaseException as reason:NEWLINE self.show_critical(_("Electrum was unable to copy your wallet file to the specified location.") + "\n" + str(reason), title=_("Unable to create backup"))NEWLINENEWLINE def update_recently_visited(self, filename):NEWLINE recent = self.config.get('recently_open', [])NEWLINE try:NEWLINE sorted(recent)NEWLINE except:NEWLINE recent = []NEWLINE if filename in recent:NEWLINE recent.remove(filename)NEWLINE recent.insert(0, filename)NEWLINE recent = [path for path in recent if os.path.exists(path)]NEWLINE recent = recent[:5]NEWLINE self.config.set_key('recently_open', recent)NEWLINE self.recently_visited_menu.clear()NEWLINE for i, k in enumerate(sorted(recent)):NEWLINE b = os.path.basename(k)NEWLINE def loader(k):NEWLINE return lambda: self.gui_object.new_window(k)NEWLINE self.recently_visited_menu.addAction(b, loader(k)).setShortcut(QKeySequence("Ctrl+%d"%(i+1)))NEWLINE self.recently_visited_menu.setEnabled(len(recent))NEWLINENEWLINE def get_wallet_folder(self):NEWLINE return os.path.dirname(os.path.abspath(self.config.get_wallet_path()))NEWLINENEWLINE def new_wallet(self):NEWLINE try:NEWLINE wallet_folder = self.get_wallet_folder()NEWLINE except FileNotFoundError as e:NEWLINE self.show_error(str(e))NEWLINE returnNEWLINE filename = get_new_wallet_name(wallet_folder)NEWLINE full_path = os.path.join(wallet_folder, filename)NEWLINE self.gui_object.start_new_window(full_path, None)NEWLINENEWLINE def init_menubar(self):NEWLINE menubar = QMenuBar()NEWLINENEWLINE file_menu = menubar.addMenu(_("&File"))NEWLINE self.recently_visited_menu = file_menu.addMenu(_("&Recently open"))NEWLINE file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open)NEWLINE file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New)NEWLINE file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs)NEWLINE 
file_menu.addAction(_("Delete"), self.remove_wallet)NEWLINE file_menu.addSeparator()NEWLINE file_menu.addAction(_("&Quit"), self.close)NEWLINENEWLINE wallet_menu = menubar.addMenu(_("&Wallet"))NEWLINE wallet_menu.addAction(_("&Information"), self.show_master_public_keys)NEWLINE wallet_menu.addSeparator()NEWLINE self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog)NEWLINE self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog)NEWLINE self.private_keys_menu = wallet_menu.addMenu(_("&Private keys"))NEWLINE self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog)NEWLINE self.import_privkey_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey)NEWLINE self.export_menu = self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog)NEWLINE self.import_address_menu = wallet_menu.addAction(_("Import addresses"), self.import_addresses)NEWLINE wallet_menu.addSeparator()NEWLINENEWLINE addresses_menu = wallet_menu.addMenu(_("&Addresses"))NEWLINE addresses_menu.addAction(_("&Filter"), lambda: self.address_list.toggle_toolbar(self.config))NEWLINE labels_menu = wallet_menu.addMenu(_("&Labels"))NEWLINE labels_menu.addAction(_("&Import"), self.do_import_labels)NEWLINE labels_menu.addAction(_("&Export"), self.do_export_labels)NEWLINE history_menu = wallet_menu.addMenu(_("&History"))NEWLINE history_menu.addAction(_("&Filter"), lambda: self.history_list.toggle_toolbar(self.config))NEWLINE history_menu.addAction(_("&Summary"), self.history_list.show_summary)NEWLINE history_menu.addAction(_("&Plot"), self.history_list.plot_history_dialog)NEWLINE history_menu.addAction(_("&Export"), self.history_list.export_history_dialog)NEWLINE contacts_menu = wallet_menu.addMenu(_("Contacts"))NEWLINE contacts_menu.addAction(_("&New"), self.new_contact_dialog)NEWLINE contacts_menu.addAction(_("Import"), lambda: self.contact_list.import_contacts())NEWLINE contacts_menu.addAction(_("Export"), lambda: 
self.contact_list.export_contacts())NEWLINE invoices_menu = wallet_menu.addMenu(_("Invoices"))NEWLINE invoices_menu.addAction(_("Import"), lambda: self.invoice_list.import_invoices())NEWLINE invoices_menu.addAction(_("Export"), lambda: self.invoice_list.export_invoices())NEWLINENEWLINE wallet_menu.addSeparator()NEWLINE wallet_menu.addAction(_("Find"), self.toggle_search).setShortcut(QKeySequence("Ctrl+F"))NEWLINENEWLINE def add_toggle_action(view_menu, tab):NEWLINE is_shown = self.config.get('show_{}_tab'.format(tab.tab_name), False)NEWLINE item_name = (_("Hide") if is_shown else _("Show")) + " " + tab.tab_descriptionNEWLINE tab.menu_action = view_menu.addAction(item_name, lambda: self.toggle_tab(tab))NEWLINENEWLINE view_menu = menubar.addMenu(_("&View"))NEWLINE add_toggle_action(view_menu, self.addresses_tab)NEWLINE add_toggle_action(view_menu, self.utxo_tab)NEWLINE add_toggle_action(view_menu, self.contacts_tab)NEWLINE add_toggle_action(view_menu, self.console_tab)NEWLINENEWLINE tools_menu = menubar.addMenu(_("&Tools"))NEWLINENEWLINE # Settings / Preferences are all reserved keywords in macOS using this as work aroundNEWLINE tools_menu.addAction(_("Electrum preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog)NEWLINE tools_menu.addAction(_("&Network"), lambda: self.gui_object.show_network_dialog(self))NEWLINE tools_menu.addAction(_("&Plugins"), self.plugins_dialog)NEWLINE tools_menu.addSeparator()NEWLINE tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message)NEWLINE tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message)NEWLINE tools_menu.addSeparator()NEWLINENEWLINE paytomany_menu = tools_menu.addAction(_("&Pay to many"), self.paytomany)NEWLINENEWLINE raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))NEWLINE raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file)NEWLINE raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text)NEWLINE 
raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid)NEWLINE raw_transaction_menu.addAction(_("&From QR code"), self.read_tx_from_qrcode)NEWLINE self.raw_transaction_menu = raw_transaction_menuNEWLINE run_hook('init_menubar_tools', self, tools_menu)NEWLINENEWLINE help_menu = menubar.addMenu(_("&Help"))NEWLINE help_menu.addAction(_("&About"), self.show_about)NEWLINE help_menu.addAction(_("&Check for updates"), self.show_update_check)NEWLINE help_menu.addAction(_("&Official website"), lambda: webopen("https://electrum.org"))NEWLINE help_menu.addSeparator()NEWLINE help_menu.addAction(_("&Documentation"), lambda: webopen("http://docs.electrum.org/")).setShortcut(QKeySequence.HelpContents)NEWLINE help_menu.addAction(_("&Report Bug"), self.show_report_bug)NEWLINE help_menu.addSeparator()NEWLINE help_menu.addAction(_("&Donate to server"), self.donate_to_server)NEWLINENEWLINE self.setMenuBar(menubar)NEWLINENEWLINE def donate_to_server(self):NEWLINE d = self.network.get_donation_address()NEWLINE if d:NEWLINE host = self.network.get_parameters().hostNEWLINE self.pay_to_URI('bitcoin:%s?message=donation for %s'%(d, host))NEWLINE else:NEWLINE self.show_error(_('No donation address for this server'))NEWLINENEWLINE def show_about(self):NEWLINE QMessageBox.about(self, "Electrum",NEWLINE (_("Version")+" %s" % ELECTRUM_VERSION + "\n\n" +NEWLINE _("Electrum's focus is speed, with low resource usage and simplifying Bitcoin.") + " " +NEWLINE _("You do not need to perform regular backups, because your wallet can be "NEWLINE "recovered from a secret phrase that you can memorize or write on paper.") + " " +NEWLINE _("Startup times are instant because it operates in conjunction with high-performance "NEWLINE "servers that handle the most complicated parts of the Bitcoin system.") + "\n\n" +NEWLINE _("Uses icons from the Icons8 icon pack (icons8.com).")))NEWLINENEWLINE def show_update_check(self, version=None):NEWLINE self.gui_object._update_check = 
UpdateCheck(self, version)NEWLINENEWLINE def show_report_bug(self):NEWLINE msg = ' '.join([NEWLINE _("Please report any bugs as issues on github:<br/>"),NEWLINE f'''<a href="{constants.GIT_REPO_ISSUES_URL}">{constants.GIT_REPO_ISSUES_URL}</a><br/><br/>''',NEWLINE _("Before reporting a bug, upgrade to the most recent version of Electrum (latest release or git HEAD), and include the version number in your report."),NEWLINE _("Try to explain not only what the bug is, but how it occurs.")NEWLINE ])NEWLINE self.show_message(msg, title="Electrum - " + _("Reporting Bugs"), rich_text=True)NEWLINENEWLINE def notify_transactions(self):NEWLINE if self.tx_notification_queue.qsize() == 0:NEWLINE returnNEWLINE if not self.wallet.up_to_date:NEWLINE return # no notifications while syncingNEWLINE now = time.time()NEWLINE rate_limit = 20 # secondsNEWLINE if self.tx_notification_last_time + rate_limit > now:NEWLINE returnNEWLINE self.tx_notification_last_time = nowNEWLINE self.logger.info("Notifying GUI about new transactions")NEWLINE txns = []NEWLINE while True:NEWLINE try:NEWLINE txns.append(self.tx_notification_queue.get_nowait())NEWLINE except queue.Empty:NEWLINE breakNEWLINE # Combine the transactions if there are at least threeNEWLINE if len(txns) >= 3:NEWLINE total_amount = 0NEWLINE for tx in txns:NEWLINE is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)NEWLINE if not is_relevant:NEWLINE continueNEWLINE total_amount += vNEWLINE self.notify(_("{} new transactions: Total amount received in the new transactions {}")NEWLINE .format(len(txns), self.format_amount_and_units(total_amount)))NEWLINE else:NEWLINE for tx in txns:NEWLINE is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)NEWLINE if not is_relevant:NEWLINE continueNEWLINE self.notify(_("New transaction: {}").format(self.format_amount_and_units(v)))NEWLINENEWLINE def notify(self, message):NEWLINE if self.tray:NEWLINE try:NEWLINE # this requires Qt 5.9NEWLINE self.tray.showMessage("Electrum", 
message, read_QIcon("electrum_dark_icon"), 20000)NEWLINE except TypeError:NEWLINE self.tray.showMessage("Electrum", message, QSystemTrayIcon.Information, 20000)NEWLINENEWLINENEWLINENEWLINE # custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the userNEWLINE def getOpenFileName(self, title, filter = ""):NEWLINE directory = self.config.get('io_dir', os.path.expanduser('~'))NEWLINE fileName, __ = QFileDialog.getOpenFileName(self, title, directory, filter)NEWLINE if fileName and directory != os.path.dirname(fileName):NEWLINE self.config.set_key('io_dir', os.path.dirname(fileName), True)NEWLINE return fileNameNEWLINENEWLINE def getSaveFileName(self, title, filename, filter = ""):NEWLINE directory = self.config.get('io_dir', os.path.expanduser('~'))NEWLINE path = os.path.join( directory, filename )NEWLINE fileName, __ = QFileDialog.getSaveFileName(self, title, path, filter)NEWLINE if fileName and directory != os.path.dirname(fileName):NEWLINE self.config.set_key('io_dir', os.path.dirname(fileName), True)NEWLINE return fileNameNEWLINENEWLINE def timer_actions(self):NEWLINE # Note this runs in the GUI threadNEWLINE if self.need_update.is_set():NEWLINE self.need_update.clear()NEWLINE self.update_wallet()NEWLINE elif not self.wallet.up_to_date:NEWLINE # this updates "synchronizing" progressNEWLINE self.update_status()NEWLINE # resolve aliasesNEWLINE # FIXME this is a blocking network call that has a timeout of 5 secNEWLINE self.payto_e.resolve()NEWLINE # update feeNEWLINE if self.require_fee_update:NEWLINE self.do_update_fee()NEWLINE self.require_fee_update = FalseNEWLINE self.notify_transactions()NEWLINENEWLINE def format_amount(self, x, is_diff=False, whitespaces=False):NEWLINE return format_satoshis(x, self.num_zeros, self.decimal_point, is_diff=is_diff, whitespaces=whitespaces)NEWLINENEWLINE def format_amount_and_units(self, amount):NEWLINE text = self.format_amount(amount) + ' '+ self.base_unit()NEWLINE x = 
self.fx.format_amount_and_units(amount) if self.fx else NoneNEWLINE if text and x:NEWLINE text += ' (%s)'%xNEWLINE return textNEWLINENEWLINE def format_fee_rate(self, fee_rate):NEWLINE # fee_rate is in sat/kBNEWLINE return format_fee_satoshis(fee_rate/1000, num_zeros=self.num_zeros) + ' sat/byte'NEWLINENEWLINE def get_decimal_point(self):NEWLINE return self.decimal_pointNEWLINENEWLINE def base_unit(self):NEWLINE return decimal_point_to_base_unit_name(self.decimal_point)NEWLINENEWLINE def connect_fields(self, window, btc_e, fiat_e, fee_e):NEWLINENEWLINE def edit_changed(edit):NEWLINE if edit.follows:NEWLINE returnNEWLINE edit.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())NEWLINE fiat_e.is_last_edited = (edit == fiat_e)NEWLINE amount = edit.get_amount()NEWLINE rate = self.fx.exchange_rate() if self.fx else Decimal('NaN')NEWLINE if rate.is_nan() or amount is None:NEWLINE if edit is fiat_e:NEWLINE btc_e.setText("")NEWLINE if fee_e:NEWLINE fee_e.setText("")NEWLINE else:NEWLINE fiat_e.setText("")NEWLINE else:NEWLINE if edit is fiat_e:NEWLINE btc_e.follows = TrueNEWLINE btc_e.setAmount(int(amount / Decimal(rate) * COIN))NEWLINE btc_e.setStyleSheet(ColorScheme.BLUE.as_stylesheet())NEWLINE btc_e.follows = FalseNEWLINE if fee_e:NEWLINE window.update_fee()NEWLINE else:NEWLINE fiat_e.follows = TrueNEWLINE fiat_e.setText(self.fx.ccy_amount_str(NEWLINE amount * Decimal(rate) / COIN, False))NEWLINE fiat_e.setStyleSheet(ColorScheme.BLUE.as_stylesheet())NEWLINE fiat_e.follows = FalseNEWLINENEWLINE btc_e.follows = FalseNEWLINE fiat_e.follows = FalseNEWLINE fiat_e.textChanged.connect(partial(edit_changed, fiat_e))NEWLINE btc_e.textChanged.connect(partial(edit_changed, btc_e))NEWLINE fiat_e.is_last_edited = FalseNEWLINENEWLINE def update_status(self):NEWLINE if not self.wallet:NEWLINE returnNEWLINENEWLINE if self.network is None:NEWLINE text = _("Offline")NEWLINE icon = read_QIcon("status_disconnected.png")NEWLINENEWLINE elif self.network.is_connected():NEWLINE server_height = 
self.network.get_server_height()NEWLINE server_lag = self.network.get_local_height() - server_heightNEWLINE fork_str = "_fork" if len(self.network.get_blockchains())>1 else ""NEWLINE # Server height can be 0 after switching to a new serverNEWLINE # until we get a headers subscription request response.NEWLINE # Display the synchronizing message in that case.NEWLINE if not self.wallet.up_to_date or server_height == 0:NEWLINE num_sent, num_answered = self.wallet.get_history_sync_state_details()NEWLINE text = ("{} ({}/{})"NEWLINE .format(_("Synchronizing..."), num_answered, num_sent))NEWLINE icon = read_QIcon("status_waiting.png")NEWLINE elif server_lag > 1:NEWLINE text = _("Server is lagging ({} blocks)").format(server_lag)NEWLINE icon = read_QIcon("status_lagging%s.png"%fork_str)NEWLINE else:NEWLINE c, u, x = self.wallet.get_balance()NEWLINE text = _("Balance" ) + ": %s "%(self.format_amount_and_units(c))NEWLINE if u:NEWLINE text += " [%s unconfirmed]"%(self.format_amount(u, is_diff=True).strip())NEWLINE if x:NEWLINE text += " [%s unmatured]"%(self.format_amount(x, is_diff=True).strip())NEWLINENEWLINE # append fiat balance and priceNEWLINE if self.fx.is_enabled():NEWLINE text += self.fx.get_fiat_status_text(c + u + x,NEWLINE self.base_unit(), self.get_decimal_point()) or ''NEWLINE if not self.network.proxy:NEWLINE icon = read_QIcon("status_connected%s.png"%fork_str)NEWLINE else:NEWLINE icon = read_QIcon("status_connected_proxy%s.png"%fork_str)NEWLINE else:NEWLINE if self.network.proxy:NEWLINE text = "{} ({})".format(_("Not connected"), _("proxy enabled"))NEWLINE else:NEWLINE text = _("Not connected")NEWLINE icon = read_QIcon("status_disconnected.png")NEWLINENEWLINE self.tray.setToolTip("%s (%s)" % (text, self.wallet.basename()))NEWLINE self.balance_label.setText(text)NEWLINE self.status_button.setIcon( icon )NEWLINENEWLINE def update_wallet(self):NEWLINE self.update_status()NEWLINE if self.wallet.up_to_date or not self.network or not 
self.network.is_connected():NEWLINE self.update_tabs()NEWLINENEWLINE def update_tabs(self, wallet=None):NEWLINE if wallet is None:NEWLINE wallet = self.walletNEWLINE if wallet != self.wallet:NEWLINE returnNEWLINE self.history_model.refresh('update_tabs')NEWLINE self.request_list.update()NEWLINE self.address_list.update()NEWLINE self.utxo_list.update()NEWLINE self.contact_list.update()NEWLINE self.invoice_list.update()NEWLINE self.update_completions()NEWLINENEWLINE def create_history_tab(self):NEWLINE self.history_model = HistoryModel(self)NEWLINE self.history_list = l = HistoryList(self, self.history_model)NEWLINE self.history_model.set_view(self.history_list)NEWLINE l.searchable_list = lNEWLINE toolbar = l.create_toolbar(self.config)NEWLINE toolbar_shown = self.config.get('show_toolbar_history', False)NEWLINE l.show_toolbar(toolbar_shown)NEWLINE return self.create_list_tab(l, toolbar)NEWLINENEWLINE def show_address(self, addr):NEWLINE from . import address_dialogNEWLINE d = address_dialog.AddressDialog(self, addr)NEWLINE d.exec_()NEWLINENEWLINE def show_transaction(self, tx, tx_desc = None):NEWLINE '''tx_desc is set only for txs created in the Send tab'''NEWLINE show_transaction(tx, self, tx_desc)NEWLINENEWLINE def create_receive_tab(self):NEWLINE # A 4-column grid layout. All the stretch is in the last column.NEWLINE # The exchange rate plugin adds a fiat widget in column 2NEWLINE self.receive_grid = grid = QGridLayout()NEWLINE grid.setSpacing(8)NEWLINE grid.setColumnStretch(3, 1)NEWLINENEWLINE self.receive_address_e = ButtonsLineEdit()NEWLINE self.receive_address_e.addCopyButton(self.app)NEWLINE self.receive_address_e.setReadOnly(True)NEWLINE msg = _('Bitcoin address where the payment should be received. 
Note that each payment request uses a different Bitcoin address.')NEWLINE self.receive_address_label = HelpLabel(_('Receiving address'), msg)NEWLINE self.receive_address_e.textChanged.connect(self.update_receive_qr)NEWLINE self.receive_address_e.textChanged.connect(self.update_receive_address_styling)NEWLINE self.receive_address_e.setFocusPolicy(Qt.ClickFocus)NEWLINE grid.addWidget(self.receive_address_label, 0, 0)NEWLINE grid.addWidget(self.receive_address_e, 0, 1, 1, -1)NEWLINENEWLINE self.receive_message_e = QLineEdit()NEWLINE grid.addWidget(QLabel(_('Description')), 1, 0)NEWLINE grid.addWidget(self.receive_message_e, 1, 1, 1, -1)NEWLINE self.receive_message_e.textChanged.connect(self.update_receive_qr)NEWLINENEWLINE self.receive_amount_e = BTCAmountEdit(self.get_decimal_point)NEWLINE grid.addWidget(QLabel(_('Requested amount')), 2, 0)NEWLINE grid.addWidget(self.receive_amount_e, 2, 1)NEWLINE self.receive_amount_e.textChanged.connect(self.update_receive_qr)NEWLINENEWLINE self.fiat_receive_e = AmountEdit(self.fx.get_currency if self.fx else '')NEWLINE if not self.fx or not self.fx.is_enabled():NEWLINE self.fiat_receive_e.setVisible(False)NEWLINE grid.addWidget(self.fiat_receive_e, 2, 2, Qt.AlignLeft)NEWLINE self.connect_fields(self, self.receive_amount_e, self.fiat_receive_e, None)NEWLINENEWLINE self.expires_combo = QComboBox()NEWLINE self.expires_combo.addItems([i[0] for i in expiration_values])NEWLINE self.expires_combo.setCurrentIndex(3)NEWLINE self.expires_combo.setFixedWidth(self.receive_amount_e.width())NEWLINE msg = ' '.join([NEWLINE _('Expiration date of your request.'),NEWLINE _('This information is seen by the recipient if you send them a signed payment request.'),NEWLINE _('Expired requests have to be deleted manually from your list, in order to free the corresponding Bitcoin addresses.'),NEWLINE _('The bitcoin address never expires and will always be part of this electrum wallet.'),NEWLINE ])NEWLINE grid.addWidget(HelpLabel(_('Request expires'), msg), 
3, 0)NEWLINE grid.addWidget(self.expires_combo, 3, 1)NEWLINE self.expires_label = QLineEdit('')NEWLINE self.expires_label.setReadOnly(1)NEWLINE self.expires_label.setFocusPolicy(Qt.NoFocus)NEWLINE self.expires_label.hide()NEWLINE grid.addWidget(self.expires_label, 3, 1)NEWLINENEWLINE self.save_request_button = QPushButton(_('Save'))NEWLINE self.save_request_button.clicked.connect(self.save_payment_request)NEWLINENEWLINE self.new_request_button = QPushButton(_('New'))NEWLINE self.new_request_button.clicked.connect(self.new_payment_request)NEWLINENEWLINE self.receive_qr = QRCodeWidget(fixedSize=200)NEWLINE self.receive_qr.mouseReleaseEvent = lambda x: self.toggle_qr_window()NEWLINE self.receive_qr.enterEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))NEWLINE self.receive_qr.leaveEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))NEWLINENEWLINE self.receive_buttons = buttons = QHBoxLayout()NEWLINE buttons.addStretch(1)NEWLINE buttons.addWidget(self.save_request_button)NEWLINE buttons.addWidget(self.new_request_button)NEWLINE grid.addLayout(buttons, 4, 1, 1, 2)NEWLINENEWLINE self.receive_requests_label = QLabel(_('Requests'))NEWLINENEWLINE from .request_list import RequestListNEWLINE self.request_list = RequestList(self)NEWLINENEWLINE # layoutNEWLINE vbox_g = QVBoxLayout()NEWLINE vbox_g.addLayout(grid)NEWLINE vbox_g.addStretch()NEWLINENEWLINE hbox = QHBoxLayout()NEWLINE hbox.addLayout(vbox_g)NEWLINE hbox.addWidget(self.receive_qr)NEWLINENEWLINE w = QWidget()NEWLINE w.searchable_list = self.request_listNEWLINE vbox = QVBoxLayout(w)NEWLINE vbox.addLayout(hbox)NEWLINE vbox.addStretch(1)NEWLINE vbox.addWidget(self.receive_requests_label)NEWLINE vbox.addWidget(self.request_list)NEWLINE vbox.setStretchFactor(self.request_list, 1000)NEWLINENEWLINE return wNEWLINENEWLINENEWLINE def delete_payment_request(self, addr):NEWLINE self.wallet.remove_payment_request(addr, self.config)NEWLINE self.request_list.update()NEWLINE 
self.clear_receive_tab()NEWLINENEWLINE def get_request_URI(self, addr):NEWLINE req = self.wallet.receive_requests[addr]NEWLINE message = self.wallet.labels.get(addr, '')NEWLINE amount = req['amount']NEWLINE extra_query_params = {}NEWLINE if req.get('time'):NEWLINE extra_query_params['time'] = str(int(req.get('time')))NEWLINE if req.get('exp'):NEWLINE extra_query_params['exp'] = str(int(req.get('exp')))NEWLINE if req.get('name') and req.get('sig'):NEWLINE sig = bfh(req.get('sig'))NEWLINE sig = bitcoin.base_encode(sig, base=58)NEWLINE extra_query_params['name'] = req['name']NEWLINE extra_query_params['sig'] = sigNEWLINE uri = util.create_bip21_uri(addr, amount, message, extra_query_params=extra_query_params)NEWLINE return str(uri)NEWLINENEWLINENEWLINE def sign_payment_request(self, addr):NEWLINE alias = self.config.get('alias')NEWLINE alias_privkey = NoneNEWLINE if alias and self.alias_info:NEWLINE alias_addr, alias_name, validated = self.alias_infoNEWLINE if alias_addr:NEWLINE if self.wallet.is_mine(alias_addr):NEWLINE msg = _('This payment request will be signed.') + '\n' + _('Please enter your password')NEWLINE password = NoneNEWLINE if self.wallet.has_keystore_encryption():NEWLINE password = self.password_dialog(msg)NEWLINE if not password:NEWLINE returnNEWLINE try:NEWLINE self.wallet.sign_payment_request(addr, alias, alias_addr, password)NEWLINE except Exception as e:NEWLINE self.show_error(repr(e))NEWLINE returnNEWLINE else:NEWLINE returnNEWLINENEWLINE def save_payment_request(self):NEWLINE addr = str(self.receive_address_e.text())NEWLINE amount = self.receive_amount_e.get_amount()NEWLINE message = self.receive_message_e.text()NEWLINE if not message and not amount:NEWLINE self.show_error(_('No message or amount'))NEWLINE return FalseNEWLINE i = self.expires_combo.currentIndex()NEWLINE expiration = list(map(lambda x: x[1], expiration_values))[i]NEWLINE req = self.wallet.make_payment_request(addr, amount, message, expiration)NEWLINE try:NEWLINE 
self.wallet.add_payment_request(req, self.config)NEWLINE except Exception as e:NEWLINE self.logger.exception('Error adding payment request')NEWLINE self.show_error(_('Error adding payment request') + ':\n' + repr(e))NEWLINE else:NEWLINE self.sign_payment_request(addr)NEWLINE self.save_request_button.setEnabled(False)NEWLINE finally:NEWLINE self.request_list.update()NEWLINE self.address_list.update()NEWLINENEWLINE def view_and_paste(self, title, msg, data):NEWLINE dialog = WindowModalDialog(self, title)NEWLINE vbox = QVBoxLayout()NEWLINE label = QLabel(msg)NEWLINE label.setWordWrap(True)NEWLINE vbox.addWidget(label)NEWLINE pr_e = ShowQRTextEdit(text=data)NEWLINE vbox.addWidget(pr_e)NEWLINE vbox.addLayout(Buttons(CopyCloseButton(pr_e.text, self.app, dialog)))NEWLINE dialog.setLayout(vbox)NEWLINE dialog.exec_()NEWLINENEWLINE def export_payment_request(self, addr):NEWLINE r = self.wallet.receive_requests.get(addr)NEWLINE pr = paymentrequest.serialize_request(r).SerializeToString()NEWLINE name = r['id'] + '.bip70'NEWLINE fileName = self.getSaveFileName(_("Select where to save your payment request"), name, "*.bip70")NEWLINE if fileName:NEWLINE with open(fileName, "wb+") as f:NEWLINE f.write(util.to_bytes(pr))NEWLINE self.show_message(_("Request saved successfully"))NEWLINE self.saved = TrueNEWLINENEWLINE def new_payment_request(self):NEWLINE addr = self.wallet.get_unused_address()NEWLINE if addr is None:NEWLINE if not self.wallet.is_deterministic():NEWLINE msg = [NEWLINE _('No more addresses in your wallet.'),NEWLINE _('You are using a non-deterministic wallet, which cannot create new addresses.'),NEWLINE _('If you want to create new addresses, use a deterministic wallet instead.')NEWLINE ]NEWLINE self.show_message(' '.join(msg))NEWLINE returnNEWLINE if not self.question(_("Warning: The next address will not be recovered automatically if you restore your wallet from seed; you may need to add it manually.\n\nThis occurs because you have too many unused addresses in your 
wallet. To avoid this situation, use the existing addresses first.\n\nCreate anyway?")):NEWLINE returnNEWLINE addr = self.wallet.create_new_address(False)NEWLINE self.set_receive_address(addr)NEWLINE self.expires_label.hide()NEWLINE self.expires_combo.show()NEWLINE self.new_request_button.setEnabled(False)NEWLINE self.receive_message_e.setFocus(1)NEWLINENEWLINE def set_receive_address(self, addr):NEWLINE self.receive_address_e.setText(addr)NEWLINE self.receive_message_e.setText('')NEWLINE self.receive_amount_e.setAmount(None)NEWLINENEWLINE def clear_receive_tab(self):NEWLINE try:NEWLINE addr = self.wallet.get_receiving_address() or ''NEWLINE except InternalAddressCorruption as e:NEWLINE self.show_error(str(e))NEWLINE addr = ''NEWLINE self.receive_address_e.setText(addr)NEWLINE self.receive_message_e.setText('')NEWLINE self.receive_amount_e.setAmount(None)NEWLINE self.expires_label.hide()NEWLINE self.expires_combo.show()NEWLINENEWLINE def toggle_qr_window(self):NEWLINE from . import qrwindowNEWLINE if not self.qr_window:NEWLINE self.qr_window = qrwindow.QR_Window(self)NEWLINE self.qr_window.setVisible(True)NEWLINE self.qr_window_geometry = self.qr_window.geometry()NEWLINE else:NEWLINE if not self.qr_window.isVisible():NEWLINE self.qr_window.setVisible(True)NEWLINE self.qr_window.setGeometry(self.qr_window_geometry)NEWLINE else:NEWLINE self.qr_window_geometry = self.qr_window.geometry()NEWLINE self.qr_window.setVisible(False)NEWLINE self.update_receive_qr()NEWLINENEWLINE def show_send_tab(self):NEWLINE self.tabs.setCurrentIndex(self.tabs.indexOf(self.send_tab))NEWLINENEWLINE def show_receive_tab(self):NEWLINE self.tabs.setCurrentIndex(self.tabs.indexOf(self.receive_tab))NEWLINENEWLINE def receive_at(self, addr):NEWLINE if not bitcoin.is_address(addr):NEWLINE returnNEWLINE self.show_receive_tab()NEWLINE self.receive_address_e.setText(addr)NEWLINE self.new_request_button.setEnabled(True)NEWLINENEWLINE def update_receive_qr(self):NEWLINE addr = 
        # NOTE(review): continuation of update_receive_qr() — its `def` and the
        # assignment target of this first expression lie before this chunk.
        str(self.receive_address_e.text())
        amount = self.receive_amount_e.get_amount()
        message = self.receive_message_e.text()
        # Enable "Save" only if the user entered something worth saving.
        self.save_request_button.setEnabled((amount is not None) or (message != ""))
        uri = util.create_bip21_uri(addr, amount, message)
        self.receive_qr.setData(uri)
        if self.qr_window and self.qr_window.isVisible():
            self.qr_window.qrw.setData(uri)

    def update_receive_address_styling(self):
        """Color the receive-address field red (with a tooltip) if the address
        was already used, to discourage address reuse."""
        addr = str(self.receive_address_e.text())
        if self.wallet.is_used(addr):
            self.receive_address_e.setStyleSheet(ColorScheme.RED.as_stylesheet(True))
            self.receive_address_e.setToolTip(_("This address has already been used. "
                                                "For better privacy, do not reuse it for new payments."))
        else:
            self.receive_address_e.setStyleSheet("")
            self.receive_address_e.setToolTip("")

    def set_feerounding_text(self, num_satoshis_added):
        # Cached message shown as tooltip / dialog by the fee-rounding icon.
        self.feerounding_text = (_('Additional {} satoshis are going to be added.')
                                 .format(num_satoshis_added))

    def create_send_tab(self):
        """Build and return the 'Send' tab widget.

        Creates the pay-to / description / amount / fee editors, wires their
        signals together, and appends the invoice list below the form.
        """
        # A 4-column grid layout.  All the stretch is in the last column.
        # The exchange rate plugin adds a fiat widget in column 2
        self.send_grid = grid = QGridLayout()
        grid.setSpacing(8)
        grid.setColumnStretch(3, 1)

        from .paytoedit import PayToEdit
        self.amount_e = BTCAmountEdit(self.get_decimal_point)
        self.payto_e = PayToEdit(self)
        msg = _('Recipient of the funds.') + '\n\n'\
              + _('You may enter a Bitcoin address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a Bitcoin address)')
        payto_label = HelpLabel(_('Pay to'), msg)
        grid.addWidget(payto_label, 1, 0)
        grid.addWidget(self.payto_e, 1, 1, 1, -1)

        # Auto-completion of contact labels in the pay-to field.
        completer = QCompleter()
        completer.setCaseSensitivity(False)
        self.payto_e.set_completer(completer)
        completer.setModel(self.completions)

        msg = _('Description of the transaction (not mandatory).') + '\n\n'\
              + _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')
        description_label = HelpLabel(_('Description'), msg)
        grid.addWidget(description_label, 2, 0)
        self.message_e = MyLineEdit()
        grid.addWidget(self.message_e, 2, 1, 1, -1)

        # "From" row: only visible when coins are explicitly selected.
        self.from_label = QLabel(_('From'))
        grid.addWidget(self.from_label, 3, 0)
        self.from_list = FromList(self, self.from_list_menu)
        grid.addWidget(self.from_list, 3, 1, 1, -1)
        self.set_pay_from([])

        msg = _('Amount to be sent.') + '\n\n' \
              + _('The amount will be displayed in red if you do not have enough funds in your wallet.') + ' ' \
              + _('Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') + '\n\n' \
              + _('Keyboard shortcut: type "!" to send all your coins.')
        amount_label = HelpLabel(_('Amount'), msg)
        grid.addWidget(amount_label, 4, 0)
        grid.addWidget(self.amount_e, 4, 1)

        self.fiat_send_e = AmountEdit(self.fx.get_currency if self.fx else '')
        if not self.fx or not self.fx.is_enabled():
            self.fiat_send_e.setVisible(False)
        grid.addWidget(self.fiat_send_e, 4, 2)
        # Keep the fiat editor frozen in lockstep with the BTC amount editor.
        self.amount_e.frozen.connect(
            lambda: self.fiat_send_e.setFrozen(self.amount_e.isReadOnly()))

        self.max_button = EnterButton(_("Max"), self.spend_max)
        self.max_button.setFixedWidth(self.amount_e.width())
        self.max_button.setCheckable(True)
        grid.addWidget(self.max_button, 4, 3)
        hbox = QHBoxLayout()
        hbox.addStretch(1)
        grid.addLayout(hbox, 4, 4)

        msg = _('Bitcoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
              + _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
              + _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')
        self.fee_e_label = HelpLabel(_('Fee'), msg)

        def fee_cb(dyn, pos, fee_rate):
            # Slider callback: persist the chosen fee setting, then refresh
            # the displayed feerate and recompute the transaction.
            if dyn:
                if self.config.use_mempool_fees():
                    self.config.set_key('depth_level', pos, False)
                else:
                    self.config.set_key('fee_level', pos, False)
            else:
                self.config.set_key('fee_per_kb', fee_rate, False)

            if fee_rate:
                fee_rate = Decimal(fee_rate)  # fee_rate is in sat/kB here
                self.feerate_e.setAmount(quantize_feerate(fee_rate / 1000))
            else:
                self.feerate_e.setAmount(None)
            self.fee_e.setModified(False)

            self.fee_slider.activate()
            self.spend_max() if self.max_button.isChecked() else self.update_fee()

        self.fee_slider = FeeSlider(self, self.config, fee_cb)
        self.fee_slider.setFixedWidth(self.amount_e.width())

        def on_fee_or_feerate(edit_changed, editing_finished):
            # Only one of absolute fee / feerate may be "frozen" at a time.
            edit_other = self.feerate_e if edit_changed == self.fee_e else self.fee_e
            if editing_finished:
                if edit_changed.get_amount() is None:
                    # This is so that when the user blanks the fee and moves on,
                    # we go back to auto-calculate mode and put a fee back.
                    edit_changed.setModified(False)
            else:
                # edit_changed was edited just now, so make sure we will
                # freeze the correct fee setting (this)
                edit_other.setModified(False)
            self.fee_slider.deactivate()
            self.update_fee()

        class TxSizeLabel(QLabel):
            # Read-only label showing the estimated tx size in bytes.
            def setAmount(self, byte_size):
                self.setText(('x %s bytes =' % byte_size) if byte_size else '')

        self.size_e = TxSizeLabel()
        self.size_e.setAlignment(Qt.AlignCenter)
        self.size_e.setAmount(0)
        self.size_e.setFixedWidth(self.amount_e.width())
        self.size_e.setStyleSheet(ColorScheme.DEFAULT.as_stylesheet())

        self.feerate_e = FeerateEdit(lambda: 0)
        self.feerate_e.setAmount(self.config.fee_per_byte())
        self.feerate_e.textEdited.connect(partial(on_fee_or_feerate, self.feerate_e, False))
        self.feerate_e.editingFinished.connect(partial(on_fee_or_feerate, self.feerate_e, True))

        self.fee_e = BTCAmountEdit(self.get_decimal_point)
        self.fee_e.textEdited.connect(partial(on_fee_or_feerate, self.fee_e, False))
        self.fee_e.editingFinished.connect(partial(on_fee_or_feerate, self.fee_e, True))

        def feerounding_onclick():
            # Explain why the actual fee differs slightly from the entry.
            text = (self.feerounding_text + '\n\n' +
                    _('To somewhat protect your privacy, Electrum tries to create change with similar precision to other outputs.') + ' ' +
                    _('At most 100 satoshis might be lost due to this rounding.') + ' ' +
                    _("You can disable this setting in '{}'.").format(_('Preferences')) + '\n' +
                    _('Also, dust is not kept as change, but added to the fee.') + '\n' +
                    _('Also, when batching RBF transactions, BIP 125 imposes a lower bound on the fee.'))
            self.show_message(title=_('Fee rounding'), msg=text)

        self.feerounding_icon = QPushButton(read_QIcon('info.png'), '')
        self.feerounding_icon.setFixedWidth(round(2.2 * char_width_in_lineedit()))
        self.feerounding_icon.setFlat(True)
        self.feerounding_icon.clicked.connect(feerounding_onclick)
        self.feerounding_icon.setVisible(False)

        self.connect_fields(self, self.amount_e, self.fiat_send_e, self.fee_e)

        vbox_feelabel = QVBoxLayout()
        vbox_feelabel.addWidget(self.fee_e_label)
        vbox_feelabel.addStretch(1)
        grid.addLayout(vbox_feelabel, 5, 0)

        # Advanced fee controls (feerate x size = fee), hidden by default.
        self.fee_adv_controls = QWidget()
        hbox = QHBoxLayout(self.fee_adv_controls)
        hbox.setContentsMargins(0, 0, 0, 0)
        hbox.addWidget(self.feerate_e)
        hbox.addWidget(self.size_e)
        hbox.addWidget(self.fee_e)
        hbox.addWidget(self.feerounding_icon, Qt.AlignLeft)
        hbox.addStretch(1)

        vbox_feecontrol = QVBoxLayout()
        vbox_feecontrol.addWidget(self.fee_adv_controls)
        vbox_feecontrol.addWidget(self.fee_slider)

        grid.addLayout(vbox_feecontrol, 5, 1, 1, -1)

        if not self.config.get('show_fee', False):
            self.fee_adv_controls.setVisible(False)

        self.preview_button = EnterButton(_("Preview"), self.do_preview)
        self.preview_button.setToolTip(_('Display the details of your transaction before signing it.'))
        self.send_button = EnterButton(_("Send"), self.do_send)
        self.clear_button = EnterButton(_("Clear"), self.do_clear)
        buttons = QHBoxLayout()
        buttons.addStretch(1)
        buttons.addWidget(self.clear_button)
        buttons.addWidget(self.preview_button)
        buttons.addWidget(self.send_button)
        grid.addLayout(buttons, 6, 1, 1, 3)

        self.amount_e.shortcut.connect(self.spend_max)
        self.payto_e.textChanged.connect(self.update_fee)
        self.amount_e.textEdited.connect(self.update_fee)

        def reset_max(text):
            # Typing any amount cancels "Max" mode.
            self.max_button.setChecked(False)
            enable = not bool(text) and not self.amount_e.isReadOnly()
            self.max_button.setEnabled(enable)
        self.amount_e.textEdited.connect(reset_max)
        self.fiat_send_e.textEdited.connect(reset_max)

        def entry_changed():
            # Recolor amount/fee/feerate fields: red = not enough funds,
            # blue = value was auto-filled rather than typed by the user.
            text = ""

            amt_color = ColorScheme.DEFAULT
            fee_color = ColorScheme.DEFAULT
            feerate_color = ColorScheme.DEFAULT

            if self.not_enough_funds:
                amt_color, fee_color = ColorScheme.RED, ColorScheme.RED
                feerate_color = ColorScheme.RED
                text = _("Not enough funds")
                c, u, x = self.wallet.get_frozen_balance()
                if c+u+x:
                    text += " ({} {} {})".format(
                        self.format_amount(c + u + x).strip(), self.base_unit(), _("are frozen")
                    )

            # blue color denotes auto-filled values
            elif self.fee_e.isModified():
                feerate_color = ColorScheme.BLUE
            elif self.feerate_e.isModified():
                fee_color = ColorScheme.BLUE
            elif self.amount_e.isModified():
                fee_color = ColorScheme.BLUE
                feerate_color = ColorScheme.BLUE
            else:
                amt_color = ColorScheme.BLUE
                fee_color = ColorScheme.BLUE
                feerate_color = ColorScheme.BLUE

            self.statusBar().showMessage(text)
            self.amount_e.setStyleSheet(amt_color.as_stylesheet())
            self.fee_e.setStyleSheet(fee_color.as_stylesheet())
            self.feerate_e.setStyleSheet(feerate_color.as_stylesheet())

        self.amount_e.textChanged.connect(entry_changed)
        self.fee_e.textChanged.connect(entry_changed)
        self.feerate_e.textChanged.connect(entry_changed)

        self.invoices_label = QLabel(_('Invoices'))
        from .invoice_list import InvoiceList
        self.invoice_list = InvoiceList(self)

        vbox0 = QVBoxLayout()
        vbox0.addLayout(grid)
        hbox = QHBoxLayout()
        hbox.addLayout(vbox0)
        w = QWidget()
        vbox = QVBoxLayout(w)
        vbox.addLayout(hbox)
        vbox.addStretch(1)
        vbox.addWidget(self.invoices_label)
        vbox.addWidget(self.invoice_list)
        vbox.setStretchFactor(self.invoice_list, 1000)
        w.searchable_list = self.invoice_list
        run_hook('create_send_tab', grid)
        return w

    def spend_max(self):
        """Check the 'Max' button and recompute the tx to spend all coins."""
        if run_hook('abort_send', self):
            return
        self.max_button.setChecked(True)
        self.do_update_fee()

    def update_fee(self):
        # Just sets a flag; presumably polled elsewhere (e.g. a GUI timer)
        # to call do_update_fee() — confirm against the rest of the class.
        self.require_fee_update = True

    def get_payto_or_dummy(self):
        """Return the entered recipient, or a dummy wallet address so a
        placeholder tx can still be sized for fee estimation."""
        r = self.payto_e.get_recipient()
        if r:
            return r
        return (TYPE_ADDRESS, self.wallet.dummy_address())

    def do_update_fee(self):
        '''Recalculate the fee.  If the fee was manually input, retain it, but
        still build the TX to see if there are enough funds.
        '''
        # (body of do_update_fee — its def/docstring are immediately above)
        freeze_fee = self.is_send_fee_frozen()
        freeze_feerate = self.is_send_feerate_frozen()
        # '!' is the sentinel for "spend max" understood by the coin chooser.
        amount = '!' if self.max_button.isChecked() else self.amount_e.get_amount()
        if amount is None:
            if not freeze_fee:
                self.fee_e.setAmount(None)
            self.not_enough_funds = False
            self.statusBar().showMessage('')
            return

        outputs, fee_estimator, tx_desc, coins = self.read_send_tab()
        if not outputs:
            # No recipient yet: size a placeholder tx to a dummy address.
            _type, addr = self.get_payto_or_dummy()
            outputs = [TxOutput(_type, addr, amount)]
        is_sweep = bool(self.tx_external_keypairs)
        make_tx = lambda fee_est: \
            self.wallet.make_unsigned_transaction(
                coins, outputs, self.config,
                fixed_fee=fee_est, is_sweep=is_sweep)
        try:
            tx = make_tx(fee_estimator)
            self.not_enough_funds = False
        except (NotEnoughFunds, NoDynamicFeeEstimates) as e:
            if not freeze_fee:
                self.fee_e.setAmount(None)
            if not freeze_feerate:
                self.feerate_e.setAmount(None)
            self.feerounding_icon.setVisible(False)

            if isinstance(e, NotEnoughFunds):
                self.not_enough_funds = True
            elif isinstance(e, NoDynamicFeeEstimates):
                # No fee estimate available: still try to show the tx size.
                try:
                    tx = make_tx(0)
                    size = tx.estimated_size()
                    self.size_e.setAmount(size)
                except BaseException:
                    pass
            return
        except BaseException:
            self.logger.exception('')
            return

        size = tx.estimated_size()
        self.size_e.setAmount(size)

        fee = tx.get_fee()
        fee = None if self.not_enough_funds else fee

        # Displayed fee/fee_rate values are set according to user input.
        # Due to rounding or dropping dust in CoinChooser,
        # actual fees often differ somewhat.
        if freeze_feerate or self.fee_slider.is_active():
            displayed_feerate = self.feerate_e.get_amount()
            if displayed_feerate is not None:
                displayed_feerate = quantize_feerate(displayed_feerate)
            else:
                # fallback to actual fee
                displayed_feerate = quantize_feerate(fee / size) if fee is not None else None
                self.feerate_e.setAmount(displayed_feerate)
            displayed_fee = round(displayed_feerate * size) if displayed_feerate is not None else None
            self.fee_e.setAmount(displayed_fee)
        else:
            if freeze_fee:
                displayed_fee = self.fee_e.get_amount()
            else:
                # fallback to actual fee if nothing is frozen
                displayed_fee = fee
                self.fee_e.setAmount(displayed_fee)
            displayed_fee = displayed_fee if displayed_fee else 0
            displayed_feerate = quantize_feerate(displayed_fee / size) if displayed_fee is not None else None
            self.feerate_e.setAmount(displayed_feerate)

        # show/hide fee rounding icon
        feerounding = (fee - displayed_fee) if fee else 0
        self.set_feerounding_text(int(feerounding))
        self.feerounding_icon.setToolTip(self.feerounding_text)
        self.feerounding_icon.setVisible(abs(feerounding) >= 1)

        if self.max_button.isChecked():
            # In max mode the amount is derived from the tx, minus any
            # plugin-added extra fee.
            amount = tx.output_value()
            __, x_fee_amount = run_hook('get_tx_extra_fee', self.wallet, tx) or (None, 0)
            amount_after_all_fees = amount - x_fee_amount
            self.amount_e.setAmount(amount_after_all_fees)

    def from_list_delete(self, item):
        """Remove one manually-selected coin from the 'From' list."""
        i = self.from_list.indexOfTopLevelItem(item)
        self.pay_from.pop(i)
        self.redraw_from_list()
        self.update_fee()

    def from_list_menu(self, position):
        # Context menu for the 'From' coin list.
        item = self.from_list.itemAt(position)
        menu = QMenu()
        menu.addAction(_("Remove"), lambda: self.from_list_delete(item))
        menu.exec_(self.from_list.viewport().mapToGlobal(position))

    def set_pay_from(self, coins):
        """Set the explicit coin selection used as tx inputs."""
        self.pay_from = list(coins)
        # (last statement of set_pay_from, whose def is just above)
        self.redraw_from_list()

    def redraw_from_list(self):
        """Repaint the 'From' coin list; hide it when no coins are selected."""
        self.from_list.clear()
        self.from_label.setHidden(len(self.pay_from) == 0)
        self.from_list.setHidden(len(self.pay_from) == 0)

        def format(x):
            # Abbreviated "txid:vout<TAB>address" display string for a coin.
            h = x.get('prevout_hash')
            return h[0:10] + '...' + h[-10:] + ":%d"%x.get('prevout_n') + u'\t' + "%s"%x.get('address')

        for item in self.pay_from:
            self.from_list.addTopLevelItem(QTreeWidgetItem( [format(item), self.format_amount(item['value']) ]))

    def get_contact_payto(self, key):
        """Return a pay-to string for a contact: 'label <address>' for plain
        addresses, otherwise the key itself (e.g. an alias)."""
        _type, label = self.contacts.get(key)
        return label + ' <' + key + '>' if _type == 'address' else key

    def update_completions(self):
        # Refresh the pay-to field's auto-completion model from contacts.
        l = [self.get_contact_payto(key) for key in self.contacts.keys()]
        self.completions.setStringList(l)

    def protected(func):
        '''Password request wrapper.  The password is passed to the function
        as the 'password' named argument.  "None" indicates either an
        unencrypted wallet, or the user cancelled the password request.
        An empty input is passed as the empty string.'''
        def request_password(self, *args, **kwargs):
            parent = self.top_level_window()
            password = None
            # Loop until a valid password is entered or the user cancels.
            while self.wallet.has_keystore_encryption():
                password = self.password_dialog(parent=parent)
                if password is None:
                    # User cancelled password input
                    return
                try:
                    self.wallet.check_password(password)
                    break
                except Exception as e:
                    self.show_error(str(e), parent=parent)
                    continue

            kwargs['password'] = password
            return func(self, *args, **kwargs)
        return request_password

    def is_send_fee_frozen(self):
        """True when the absolute-fee field holds a user-entered value that
        should be kept instead of being auto-recomputed."""
        return self.fee_e.isVisible() and self.fee_e.isModified() \
               and (self.fee_e.text() or self.fee_e.hasFocus())

    def is_send_feerate_frozen(self):
        """True when the feerate field holds a user-entered value."""
        return self.feerate_e.isVisible() and self.feerate_e.isModified() \
               and (self.feerate_e.text() or self.feerate_e.hasFocus())

    def get_send_fee_estimator(self):
        """Return the fee spec for make_unsigned_transaction: a fixed satoshi
        amount, a callable estimator built from the frozen feerate, or None
        for the wallet's default estimation."""
        if self.is_send_fee_frozen():
            fee_estimator = self.fee_e.get_amount()
        elif self.is_send_feerate_frozen():
            amount = self.feerate_e.get_amount()  # sat/byte feerate
            amount = 0 if amount is None else amount * 1000  # sat/kilobyte feerate
            fee_estimator = partial(
                simple_config.SimpleConfig.estimate_fee_for_feerate, amount)
        else:
            fee_estimator = None
        return fee_estimator

    def read_send_tab(self):
        """Collect (outputs, fee_estimator, label, coins) from the send form.
        Outputs come from the active payment request if there is one."""
        label = self.message_e.text()
        if self.payment_request:
            outputs = self.payment_request.get_outputs()
        else:
            outputs = self.payto_e.get_outputs(self.max_button.isChecked())
        fee_estimator = self.get_send_fee_estimator()
        coins = self.get_coins()
        return outputs, fee_estimator, label, coins

    def check_send_tab_outputs_and_show_errors(self, outputs) -> bool:
        """Returns whether there are errors with outputs.
        Also shows error dialog to user if so.
        """
        pr = self.payment_request
        if pr:
            if pr.has_expired():
                self.show_error(_('Payment request has expired'))
                return True

        if not pr:
            errors = self.payto_e.get_errors()
            if errors:
                self.show_warning(_("Invalid Lines found:") + "\n\n" + '\n'.join([ _("Line #") + str(x[0]+1) + ": " + x[1] for x in errors]))
                return True

            if self.payto_e.is_alias and self.payto_e.validated is False:
                alias = self.payto_e.toPlainText()
                msg = _('WARNING: the alias "{}" could not be validated via an additional '
                        'security check, DNSSEC, and thus may not be correct.').format(alias) + '\n'
                msg += _('Do you wish to continue?')
                if not self.question(msg):
                    return True

        if not outputs:
            self.show_error(_('No outputs'))
            # (still inside check_send_tab_outputs_and_show_errors)
            return True

        for o in outputs:
            if o.address is None:
                self.show_error(_('Bitcoin Address is None'))
                return True
            if o.type == TYPE_ADDRESS and not bitcoin.is_address(o.address):
                self.show_error(_('Invalid Bitcoin Address'))
                return True
            if o.value is None:
                self.show_error(_('Invalid Amount'))
                return True

        return False  # no errors

    def do_preview(self):
        # 'Preview' button: build and show the tx without signing/broadcasting.
        self.do_send(preview = True)

    def do_send(self, preview = False):
        """Validate the send form, build the transaction, confirm with the
        user (asking for the password if needed), then sign and broadcast.
        With preview=True, stop after showing the unsigned transaction."""
        if run_hook('abort_send', self):
            return
        outputs, fee_estimator, tx_desc, coins = self.read_send_tab()
        if self.check_send_tab_outputs_and_show_errors(outputs):
            return
        try:
            is_sweep = bool(self.tx_external_keypairs)
            tx = self.wallet.make_unsigned_transaction(
                coins, outputs, self.config, fixed_fee=fee_estimator,
                is_sweep=is_sweep)
        except (NotEnoughFunds, NoDynamicFeeEstimates) as e:
            self.show_message(str(e))
            return
        except InternalAddressCorruption as e:
            # Serious wallet problem: show and re-raise so it is not lost.
            self.show_error(str(e))
            raise
        except BaseException as e:
            self.logger.exception('')
            self.show_message(str(e))
            return

        amount = tx.output_value() if self.max_button.isChecked() else sum(map(lambda x:x[2], outputs))
        fee = tx.get_fee()

        use_rbf = self.config.get('use_rbf', True)
        if use_rbf:
            tx.set_rbf(True)

        # Refuse to proceed if the fee is below the server's relay floor.
        if fee < self.wallet.relayfee() * tx.estimated_size() / 1000:
            self.show_error('\n'.join([
                _("This transaction requires a higher fee, or it will not be propagated by your current server"),
                _("Try to raise your transaction fee, or use a server with a lower relay fee.")
            ]))
            return

        if preview:
            self.show_transaction(tx, tx_desc)
            return

        if not self.network:
            self.show_error(_("You can't broadcast a transaction without a live network connection."))
            return

        # confirmation dialog
        msg = [
            _("Amount to be sent") + ": " + self.format_amount_and_units(amount),
            _("Mining fee") + ": " + self.format_amount_and_units(fee),
        ]

        x_fee = run_hook('get_tx_extra_fee', self.wallet, tx)
        if x_fee:
            x_fee_address, x_fee_amount = x_fee
            msg.append( _("Additional fees") + ": " + self.format_amount_and_units(x_fee_amount) )

        feerate_warning = simple_config.FEERATE_WARNING_HIGH_FEE
        if fee > feerate_warning * tx.estimated_size() / 1000:
            msg.append(_('Warning') + ': ' + _("The fee for this transaction seems unusually high."))

        if self.wallet.has_keystore_encryption():
            msg.append("")
            msg.append(_("Enter your password to proceed"))
            password = self.password_dialog('\n'.join(msg))
            if not password:
                return
        else:
            msg.append(_('Proceed?'))
            password = None
            if not self.question('\n'.join(msg)):
                return

        def sign_done(success):
            # Runs after signing: show incomplete txs for further signatures,
            # broadcast complete ones.
            if success:
                if not tx.is_complete():
                    self.show_transaction(tx)
                    self.do_clear()
                else:
                    self.broadcast_transaction(tx, tx_desc)
        self.sign_tx_with_password(tx, sign_done, password)

    @protected
    def sign_tx(self, tx, callback, password):
        # @protected supplies `password` after prompting the user.
        self.sign_tx_with_password(tx, callback, password)

    def sign_tx_with_password(self, tx, callback, password):
        '''Sign the transaction in a separate thread.  When done, calls
        the callback with a success code of True or False.
        '''
        # (body of sign_tx_with_password — def/docstring immediately above)
        def on_success(result):
            callback(True)
        def on_failure(exc_info):
            self.on_error(exc_info)
            callback(False)
        # Plugins (e.g. TrustedCoin) may wrap the success handler.
        on_success = run_hook('tc_sign_wrapper', self.wallet, tx, on_success, on_failure) or on_success
        if self.tx_external_keypairs:
            # can sign directly
            task = partial(Transaction.sign, tx, self.tx_external_keypairs)
        else:
            task = partial(self.wallet.sign_transaction, tx, password)
        msg = _('Signing transaction...')
        WaitingDialog(self, msg, task, on_success, on_failure)

    def broadcast_transaction(self, tx, tx_desc):
        """Broadcast *tx* on a worker thread and report the result in the GUI.
        Also settles any active BIP70 payment request on success."""

        def broadcast_thread():
            # non-GUI thread
            pr = self.payment_request
            if pr and pr.has_expired():
                self.payment_request = None
                return False, _("Payment request has expired")
            status = False
            try:
                self.network.run_from_another_thread(self.network.broadcast_transaction(tx))
            except TxBroadcastError as e:
                msg = e.get_message_for_gui()
            except BestEffortRequestFailed as e:
                msg = repr(e)
            else:
                status, msg = True, tx.txid()
            if pr and status is True:
                # Mark the invoice paid and send the BIP70 payment ACK.
                self.invoices.set_paid(pr, tx.txid())
                self.invoices.save()
                self.payment_request = None
                refund_address = self.wallet.get_receiving_address()
                coro = pr.send_payment_and_receive_paymentack(str(tx), refund_address)
                fut = asyncio.run_coroutine_threadsafe(coro, self.network.asyncio_loop)
                ack_status, ack_msg = fut.result(timeout=20)
                self.logger.info(f"Payment ACK: {ack_status}. Ack message: {ack_msg}")
            return status, msg

        # Capture current TL window; override might be removed on return
        parent = self.top_level_window(lambda win: isinstance(win, MessageBoxMixin))

        def broadcast_done(result):
            # GUI thread
            if result:
                status, msg = result
                if status:
                    if tx_desc is not None and tx.is_complete():
                        self.wallet.set_label(tx.txid(), tx_desc)
                    parent.show_message(_('Payment sent.') + '\n' + msg)
                    self.invoice_list.update()
                    self.do_clear()
                else:
                    msg = msg or ''
                    parent.show_error(msg)

        WaitingDialog(self, _('Broadcasting transaction...'),
                      broadcast_thread, broadcast_done, self.on_error)

    def query_choice(self, msg, choices):
        # Needed by QtHandler for hardware wallets
        dialog = WindowModalDialog(self.top_level_window())
        clayout = ChoicesLayout(msg, choices)
        vbox = QVBoxLayout(dialog)
        vbox.addLayout(clayout.layout())
        vbox.addLayout(Buttons(OkButton(dialog)))
        if not dialog.exec_():
            return None
        return clayout.selected_index()

    def lock_amount(self, b):
        """Freeze/unfreeze the amount editor and the Max button together."""
        self.amount_e.setFrozen(b)
        self.max_button.setEnabled(not b)

    def prepare_for_payment_request(self):
        """Put the send tab into payment-request mode: freeze the editable
        fields and show a placeholder until the request is fetched."""
        self.show_send_tab()
        self.payto_e.is_pr = True
        for e in [self.payto_e, self.message_e]:
            e.setFrozen(True)
        self.lock_amount(True)
        self.payto_e.setText(_("please wait..."))
        return True

    def delete_invoice(self, key):
        self.invoices.remove(key)
        self.invoice_list.update()

    def payment_request_ok(self):
        """Handle a successfully verified payment request: record it as an
        invoice and populate the send form from it."""
        pr = self.payment_request
        if not pr:
            return
        key = self.invoices.add(pr)
        status = self.invoices.get_status(key)
        self.invoice_list.update()
        if status == PR_PAID:
            self.show_message("invoice already paid")
            # (still inside payment_request_ok: invoice was already paid)
            self.do_clear()
            self.payment_request = None
            return
        self.payto_e.is_pr = True
        # Green = valid request, red/expired styling otherwise.
        if not pr.has_expired():
            self.payto_e.setGreen()
        else:
            self.payto_e.setExpired()
        self.payto_e.setText(pr.get_requestor())
        self.amount_e.setText(format_satoshis_plain(pr.get_amount(), self.decimal_point))
        self.message_e.setText(pr.get_memo())
        # signal to set fee
        self.amount_e.textEdited.emit("")

    def payment_request_error(self):
        """Show the failed payment request's error and reset the send tab."""
        pr = self.payment_request
        if not pr:
            return
        self.show_message(pr.error)
        self.payment_request = None
        self.do_clear()

    def on_pr(self, request):
        # Called from the network thread with a fetched payment request;
        # hand the result back to the GUI thread via signals.
        self.payment_request = request
        if self.payment_request.verify(self.contacts):
            self.payment_request_ok_signal.emit()
        else:
            self.payment_request_error_signal.emit()

    def pay_to_URI(self, URI):
        """Populate the send tab from a BIP21 'bitcoin:' URI, or start
        fetching an embedded/remote payment request if the URI carries one."""
        if not URI:
            return
        try:
            out = util.parse_URI(URI, self.on_pr)
        except InvalidBitcoinURI as e:
            self.show_error(_("Error parsing URI") + f":\n{e}")
            return
        self.show_send_tab()
        r = out.get('r')
        sig = out.get('sig')
        name = out.get('name')
        if r or (name and sig):
            # BIP70 request (remote URL or signed inline data): wait for on_pr.
            self.prepare_for_payment_request()
            return
        address = out.get('address')
        amount = out.get('amount')
        label = out.get('label')
        message = out.get('message')
        # use label as description (not BIP21 compliant)
        if label and not message:
            message = label
        if address:
            self.payto_e.setText(address)
        if message:
            self.message_e.setText(message)
        if amount:
            self.amount_e.setAmount(amount)
            self.amount_e.textEdited.emit("")


    def do_clear(self):
        """Reset the entire send tab to its initial state."""
        self.max_button.setChecked(False)
        self.not_enough_funds = False
        self.payment_request = None
        self.payto_e.is_pr = False
        for e in [self.payto_e, self.message_e, self.amount_e, self.fiat_send_e,
                  self.fee_e, self.feerate_e]:
            e.setText('')
            e.setFrozen(False)
        self.fee_slider.activate()
        self.feerate_e.setAmount(self.config.fee_per_byte())
        self.size_e.setAmount(0)
        self.feerounding_icon.setVisible(False)
        self.set_pay_from([])
        self.tx_external_keypairs = {}
        self.update_status()
        run_hook('do_clear', self)

    def set_frozen_state_of_addresses(self, addrs, freeze: bool):
        self.wallet.set_frozen_state_of_addresses(addrs, freeze)
        self.address_list.update()
        self.utxo_list.update()
        self.update_fee()

    def set_frozen_state_of_coins(self, utxos, freeze: bool):
        self.wallet.set_frozen_state_of_coins(utxos, freeze)
        self.utxo_list.update()
        self.update_fee()

    def create_list_tab(self, l, toolbar=None):
        """Wrap a list widget (with optional toolbar) in a tab container."""
        w = QWidget()
        w.searchable_list = l
        vbox = QVBoxLayout()
        w.setLayout(vbox)
        vbox.setContentsMargins(0, 0, 0, 0)
        vbox.setSpacing(0)
        if toolbar:
            vbox.addLayout(toolbar)
        vbox.addWidget(l)
        return w

    def create_addresses_tab(self):
        from .address_list import AddressList
        self.address_list = l = AddressList(self)
        toolbar = l.create_toolbar(self.config)
        toolbar_shown = self.config.get('show_toolbar_addresses', False)
        l.show_toolbar(toolbar_shown)
        return self.create_list_tab(l, toolbar)

    def create_utxo_tab(self):
        from .utxo_list import UTXOList
        self.utxo_list = l = UTXOList(self)
        return self.create_list_tab(l)

    def create_contacts_tab(self):
        from .contact_list import ContactList
        self.contact_list = l = ContactList(self)
        return self.create_list_tab(l)

    def remove_address(self, addr):
        """Delete *addr* from the wallet after user confirmation."""
        if self.question(_("Do you want to remove {} from your wallet?").format(addr)):
wallet?").format(addr)):NEWLINE self.wallet.delete_address(addr)NEWLINE self.need_update.set() # history, addresses, coinsNEWLINE self.clear_receive_tab()NEWLINENEWLINE def get_coins(self):NEWLINE if self.pay_from:NEWLINE return self.pay_fromNEWLINE else:NEWLINE return self.wallet.get_spendable_coins(None, self.config)NEWLINENEWLINE def spend_coins(self, coins):NEWLINE self.set_pay_from(coins)NEWLINE self.show_send_tab()NEWLINE self.update_fee()NEWLINENEWLINE def paytomany(self):NEWLINE self.show_send_tab()NEWLINE self.payto_e.paytomany()NEWLINE msg = '\n'.join([NEWLINE _('Enter a list of outputs in the \'Pay to\' field.'),NEWLINE _('One output per line.'),NEWLINE _('Format: address, amount'),NEWLINE _('You may load a CSV file using the file icon.')NEWLINE ])NEWLINE self.show_message(msg, title=_('Pay to many'))NEWLINENEWLINE def payto_contacts(self, labels):NEWLINE paytos = [self.get_contact_payto(label) for label in labels]NEWLINE self.show_send_tab()NEWLINE if len(paytos) == 1:NEWLINE self.payto_e.setText(paytos[0])NEWLINE self.amount_e.setFocus()NEWLINE else:NEWLINE text = "\n".join([payto + ", 0" for payto in paytos])NEWLINE self.payto_e.setText(text)NEWLINE self.payto_e.setFocus()NEWLINENEWLINE def set_contact(self, label, address):NEWLINE if not is_address(address):NEWLINE self.show_error(_('Invalid Address'))NEWLINE self.contact_list.update() # Displays original unchanged valueNEWLINE return FalseNEWLINE self.contacts[address] = ('address', label)NEWLINE self.contact_list.update()NEWLINE self.history_list.update()NEWLINE self.update_completions()NEWLINE return TrueNEWLINENEWLINE def delete_contacts(self, labels):NEWLINE if not self.question(_("Remove {} from your list of contacts?")NEWLINE .format(" + ".join(labels))):NEWLINE returnNEWLINE for label in labels:NEWLINE self.contacts.pop(label)NEWLINE self.history_list.update()NEWLINE self.contact_list.update()NEWLINE self.update_completions()NEWLINENEWLINE def show_invoice(self, key):NEWLINE pr = 
self.invoices.get(key)NEWLINE if pr is None:NEWLINE self.show_error('Cannot find payment request in wallet.')NEWLINE returnNEWLINE pr.verify(self.contacts)NEWLINE self.show_pr_details(pr)NEWLINENEWLINE def show_pr_details(self, pr):NEWLINE key = pr.get_id()NEWLINE d = WindowModalDialog(self, _("Invoice"))NEWLINE vbox = QVBoxLayout(d)NEWLINE grid = QGridLayout()NEWLINE grid.addWidget(QLabel(_("Requestor") + ':'), 0, 0)NEWLINE grid.addWidget(QLabel(pr.get_requestor()), 0, 1)NEWLINE grid.addWidget(QLabel(_("Amount") + ':'), 1, 0)NEWLINE outputs_str = '\n'.join(map(lambda x: self.format_amount(x[2])+ self.base_unit() + ' @ ' + x[1], pr.get_outputs()))NEWLINE grid.addWidget(QLabel(outputs_str), 1, 1)NEWLINE expires = pr.get_expiration_date()NEWLINE grid.addWidget(QLabel(_("Memo") + ':'), 2, 0)NEWLINE grid.addWidget(QLabel(pr.get_memo()), 2, 1)NEWLINE grid.addWidget(QLabel(_("Signature") + ':'), 3, 0)NEWLINE grid.addWidget(QLabel(pr.get_verify_status()), 3, 1)NEWLINE if expires:NEWLINE grid.addWidget(QLabel(_("Expires") + ':'), 4, 0)NEWLINE grid.addWidget(QLabel(format_time(expires)), 4, 1)NEWLINE vbox.addLayout(grid)NEWLINE def do_export():NEWLINE name = str(key) + '.bip70'NEWLINE fn = self.getSaveFileName(_("Save invoice to file"), name, filter="*.bip70")NEWLINE if not fn:NEWLINE returnNEWLINE with open(fn, 'wb') as f:NEWLINE data = f.write(pr.raw)NEWLINE self.show_message(_('Invoice saved as' + ' ' + fn))NEWLINE exportButton = EnterButton(_('Save'), do_export)NEWLINE def do_delete():NEWLINE if self.question(_('Delete invoice?')):NEWLINE self.invoices.remove(key)NEWLINE self.history_list.update()NEWLINE self.invoice_list.update()NEWLINE d.close()NEWLINE deleteButton = EnterButton(_('Delete'), do_delete)NEWLINE vbox.addLayout(Buttons(exportButton, deleteButton, CloseButton(d)))NEWLINE d.exec_()NEWLINENEWLINE def do_pay_invoice(self, key):NEWLINE pr = self.invoices.get(key)NEWLINE self.payment_request = prNEWLINE self.prepare_for_payment_request()NEWLINE pr.error = None 
# this forces verify() to re-runNEWLINE if pr.verify(self.contacts):NEWLINE self.payment_request_ok()NEWLINE else:NEWLINE self.payment_request_error()NEWLINENEWLINE def create_console_tab(self):NEWLINE from .console import ConsoleNEWLINE self.console = console = Console()NEWLINE return consoleNEWLINENEWLINE def update_console(self):NEWLINE console = self.consoleNEWLINE console.history = self.config.get("console-history",[])NEWLINE console.history_index = len(console.history)NEWLINENEWLINE console.updateNamespace({NEWLINE 'wallet': self.wallet,NEWLINE 'network': self.network,NEWLINE 'plugins': self.gui_object.plugins,NEWLINE 'window': self,NEWLINE 'config': self.config,NEWLINE 'electrum': electrum,NEWLINE 'daemon': self.gui_object.daemon,NEWLINE 'util': util,NEWLINE 'bitcoin': bitcoin,NEWLINE })NEWLINENEWLINE c = commands.Commands(self.config, self.wallet, self.network, lambda: self.console.set_json(True))NEWLINE methods = {}NEWLINE def mkfunc(f, method):NEWLINE return lambda *args: f(method, args, self.password_dialog)NEWLINE for m in dir(c):NEWLINE if m[0]=='_' or m in ['network','wallet','config']: continueNEWLINE methods[m] = mkfunc(c._run, m)NEWLINENEWLINE console.updateNamespace(methods)NEWLINENEWLINE def create_status_bar(self):NEWLINENEWLINE sb = QStatusBar()NEWLINE sb.setFixedHeight(35)NEWLINENEWLINE self.balance_label = QLabel("Loading wallet...")NEWLINE self.balance_label.setTextInteractionFlags(Qt.TextSelectableByMouse)NEWLINE self.balance_label.setStyleSheet("""QLabel { padding: 0 }""")NEWLINE sb.addWidget(self.balance_label)NEWLINENEWLINE self.search_box = QLineEdit()NEWLINE self.search_box.textChanged.connect(self.do_search)NEWLINE self.search_box.hide()NEWLINE sb.addPermanentWidget(self.search_box)NEWLINENEWLINE self.update_check_button = QPushButton("")NEWLINE self.update_check_button.setFlat(True)NEWLINE self.update_check_button.setCursor(QCursor(Qt.PointingHandCursor))NEWLINE self.update_check_button.setIcon(read_QIcon("update.png"))NEWLINE 
        self.update_check_button.hide()
        sb.addPermanentWidget(self.update_check_button)

        self.password_button = StatusBarButton(QIcon(), _("Password"), self.change_password_dialog )
        sb.addPermanentWidget(self.password_button)

        sb.addPermanentWidget(StatusBarButton(read_QIcon("preferences.png"), _("Preferences"), self.settings_dialog ) )
        self.seed_button = StatusBarButton(read_QIcon("seed.png"), _("Seed"), self.show_seed_dialog )
        sb.addPermanentWidget(self.seed_button)
        self.status_button = StatusBarButton(read_QIcon("status_disconnected.png"), _("Network"), lambda: self.gui_object.show_network_dialog(self))
        sb.addPermanentWidget(self.status_button)
        run_hook('create_status_bar', sb)  # let plugins add their own widgets
        self.setStatusBar(sb)

    def update_lock_icon(self):
        # lock icon reflects whether the wallet currently has a password set
        icon = read_QIcon("lock.png") if self.wallet.has_password() else read_QIcon("unlock.png")
        self.password_button.setIcon(icon)

    def update_buttons_on_seed(self):
        """Show/hide status-bar buttons based on wallet capabilities."""
        self.seed_button.setVisible(self.wallet.has_seed())
        self.password_button.setVisible(self.wallet.may_have_password())
        self.send_button.setVisible(not self.wallet.is_watching_only())

    def change_password_dialog(self):
        """Run the change-password flow.

        Hardware wallets (storage encryption keyed to the device) use a
        dedicated dialog and derive the storage password from the device;
        software wallets ask the user for old/new passwords directly.
        """
        from electrum.storage import STO_EV_XPUB_PW
        if self.wallet.get_available_storage_encryption_version() == STO_EV_XPUB_PW:
            from .password_dialog import ChangePasswordDialogForHW
            d = ChangePasswordDialogForHW(self, self.wallet)
            ok, encrypt_file = d.run()
            if not ok:
                return

            try:
                hw_dev_pw = self.wallet.keystore.get_password_for_storage_encryption()
            except UserCancelled:
                return
            except BaseException as e:
                self.logger.exception('')
                self.show_error(repr(e))
                return
            # the device-derived password serves as both old and new password,
            # depending on current/desired encryption state
            old_password = hw_dev_pw if self.wallet.has_password() else None
            new_password = hw_dev_pw if encrypt_file else None
        else:
            from .password_dialog import ChangePasswordDialogForSW
            d = ChangePasswordDialogForSW(self, self.wallet)
            ok, old_password, new_password, encrypt_file = d.run()

        if not ok:
            return
        try:
            self.wallet.update_password(old_password, new_password, encrypt_file)
        except InvalidPassword as e:
            self.show_error(str(e))
            return
        except BaseException:
            self.logger.exception('Failed to update password')
            self.show_error(_('Failed to update password'))
            return
        msg = _('Password was updated successfully') if self.wallet.has_password() else _('Password is disabled, this wallet is not protected')
        self.show_message(msg, title=_("Success"))
        self.update_lock_icon()

    def toggle_search(self):
        """Toggle the status-bar search box; clearing the filter when hidden."""
        tab = self.tabs.currentWidget()
        #if hasattr(tab, 'searchable_list'):
        #    tab.searchable_list.toggle_toolbar()
        #return
        self.search_box.setHidden(not self.search_box.isHidden())
        if not self.search_box.isHidden():
            self.search_box.setFocus(1)
        else:
            self.do_search('')

    def do_search(self, t):
        # forward the filter text to the current tab's list, if it has one
        tab = self.tabs.currentWidget()
        if hasattr(tab, 'searchable_list'):
            tab.searchable_list.filter(t)

    def new_contact_dialog(self):
        """Modal dialog asking for an address and a name; saves the contact on OK."""
        d = WindowModalDialog(self, _("New Contact"))
        vbox = QVBoxLayout(d)
        vbox.addWidget(QLabel(_('New Contact') + ':'))
        grid = QGridLayout()
        line1 = QLineEdit()
        line1.setFixedWidth(32 * char_width_in_lineedit())
        line2 = QLineEdit()
        line2.setFixedWidth(32 * char_width_in_lineedit())
        grid.addWidget(QLabel(_("Address")), 1, 0)
        grid.addWidget(line1, 1, 1)
        grid.addWidget(QLabel(_("Name")), 2, 0)
        grid.addWidget(line2, 2, 1)
        vbox.addLayout(grid)
        vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
        if d.exec_():
            self.set_contact(line2.text(), line1.text())
    def show_master_public_keys(self):
        """Display wallet metadata (type, script type, seed/keystore info) and
        the master public key(s) in a modal dialog."""
        dialog = WindowModalDialog(self, _("Wallet Information"))
        dialog.setMinimumSize(500, 100)
        mpk_list = self.wallet.get_master_public_keys()
        vbox = QVBoxLayout()
        wallet_type = self.wallet.storage.get('wallet_type', '')
        if self.wallet.is_watching_only():
            wallet_type += ' [{}]'.format(_('watching-only'))
        seed_available = _('True') if self.wallet.has_seed() else _('False')
        keystore_types = [k.get_type_text() for k in self.wallet.get_keystores()]
        grid = QGridLayout()
        basename = os.path.basename(self.wallet.storage.path)
        grid.addWidget(QLabel(_("Wallet name")+ ':'), 0, 0)
        grid.addWidget(QLabel(basename), 0, 1)
        grid.addWidget(QLabel(_("Wallet type")+ ':'), 1, 0)
        grid.addWidget(QLabel(wallet_type), 1, 1)
        grid.addWidget(QLabel(_("Script type")+ ':'), 2, 0)
        grid.addWidget(QLabel(self.wallet.txin_type), 2, 1)
        grid.addWidget(QLabel(_("Seed available") + ':'), 3, 0)
        grid.addWidget(QLabel(str(seed_available)), 3, 1)
        if len(keystore_types) <= 1:
            grid.addWidget(QLabel(_("Keystore type") + ':'), 4, 0)
            ks_type = str(keystore_types[0]) if keystore_types else _('No keystore')
            grid.addWidget(QLabel(ks_type), 4, 1)
        vbox.addLayout(grid)
        if self.wallet.is_deterministic():
            mpk_text = ShowQRTextEdit()
            mpk_text.setMaximumHeight(150)
            mpk_text.addCopyButton(self.app)
            def show_mpk(index):
                mpk_text.setText(mpk_list[index])
                mpk_text.repaint()  # macOS hack for #4777
            # only show the combobox in case multiple accounts are available
            if len(mpk_list) > 1:
                def label(key):
                    if isinstance(self.wallet, Multisig_Wallet):
                        return _("cosigner") + f' {key+1} ( keystore: {keystore_types[key]} )'
                    return ''
                labels = [label(i) for i in range(len(mpk_list))]
                on_click = lambda clayout: show_mpk(clayout.selected_index())
                labels_clayout = ChoicesLayout(_("Master Public Keys"), labels, on_click)
                vbox.addLayout(labels_clayout.layout())
            else:
                vbox.addWidget(QLabel(_("Master Public Key")))
                show_mpk(0)
            vbox.addWidget(mpk_text)
        vbox.addStretch(1)
        vbox.addLayout(Buttons(CloseButton(dialog)))
        dialog.setLayout(vbox)
        dialog.exec_()

    def remove_wallet(self):
        """Ask for confirmation, then delete the wallet file (password-protected)."""
        if self.question('\n'.join([
                _('Delete wallet file?'),
                "%s"%self.wallet.storage.path,
                _('If your wallet contains funds, make sure you have saved its seed.')])):
            self._delete_wallet()

    @protected
    def _delete_wallet(self, password):
        wallet_path = self.wallet.storage.path
        basename = os.path.basename(wallet_path)
        r = self.gui_object.daemon.delete_wallet(wallet_path)
        self.close()
        if r:
            self.show_error(_("Wallet removed: {}").format(basename))
        else:
            self.show_error(_("Wallet file not found: {}").format(basename))

    @protected
    def show_seed_dialog(self, password):
        """Show the wallet seed (and passphrase) after password verification."""
        if not self.wallet.has_seed():
            self.show_message(_('This wallet has no seed'))
            return
        keystore = self.wallet.get_keystore()
        try:
            seed = keystore.get_seed(password)
            passphrase = keystore.get_passphrase(password)
        except BaseException as e:
            self.show_error(repr(e))
            return
        from .seed_dialog import SeedDialog
        d = SeedDialog(self, seed, passphrase)
        d.exec_()

    def show_qrcode(self, data, title = _("QR code"), parent=None):
        if not data:
            return
        d = QRDialog(data, parent or self, title)
        d.exec_()

    @protected
    def show_private_key(self, address, password):
        """Show the private key (and redeem script, if any) for `address`."""
        if not address:
            return
        try:
            pk, redeem_script = self.wallet.export_private_key(address, password)
        except Exception as e:
            self.logger.exception('')
            self.show_message(repr(e))
            return
        xtype = bitcoin.deserialize_privkey(pk)[0]
        d = WindowModalDialog(self, _("Private key"))
        d.setMinimumSize(600, 150)
        vbox = QVBoxLayout()
        vbox.addWidget(QLabel(_("Address") + ': ' + address))
        vbox.addWidget(QLabel(_("Script type") + ': ' + xtype))
        vbox.addWidget(QLabel(_("Private key") + ':'))
        keys_e = ShowQRTextEdit(text=pk)
        keys_e.addCopyButton(self.app)
        vbox.addWidget(keys_e)
        if redeem_script:
            vbox.addWidget(QLabel(_("Redeem Script") + ':'))
            rds_e = ShowQRTextEdit(text=redeem_script)
            rds_e.addCopyButton(self.app)
            vbox.addWidget(rds_e)
        vbox.addLayout(Buttons(CloseButton(d)))
        d.setLayout(vbox)
        d.exec_()

    # message shown when signing is attempted with an address type that has
    # no unique public key (see do_sign below)
    msg_sign = _("Signing with an address actually means signing with the corresponding "
                 "private key, and verifying with the corresponding public key. The "
                 "address you have entered does not have a unique public key, so these "
                 "operations cannot be performed.") + '\n\n' + \
               _('The operation is undefined. Not just in Electrum, but in general.')

    @protected
    def do_sign(self, address, message, signature, password):
        """Sign `message` with the key for `address`; result is written into
        the `signature` widget from a background wallet thread."""
        address = address.text().strip()
        message = message.toPlainText().strip()
        if not bitcoin.is_address(address):
            self.show_message(_('Invalid Bitcoin address.'))
            return
        if self.wallet.is_watching_only():
            self.show_message(_('This is a watching-only wallet.'))
            return
        if not self.wallet.is_mine(address):
            self.show_message(_('Address not in wallet.'))
            return
        txin_type = self.wallet.get_txin_type(address)
        if txin_type not in ['p2pkh', 'p2wpkh', 'p2wpkh-p2sh']:
            self.show_message(_('Cannot sign messages with this type of address:') + \
                              ' ' + txin_type + '\n\n' + self.msg_sign)
            return
        task = partial(self.wallet.sign_message, address, message, password)

        def show_signed_message(sig):
            try:
                signature.setText(base64.b64encode(sig).decode('ascii'))
            except RuntimeError:
                # (signature) wrapped C/C++ object has been deleted
                pass

        self.wallet.thread.add(task, on_success=show_signed_message)

    def do_verify(self, address, message, signature):
        """Verify a base64 signature for `message` against `address`."""
        address = address.text().strip()
        message = message.toPlainText().strip().encode('utf-8')
        if not bitcoin.is_address(address):
            self.show_message(_('Invalid Bitcoin address.'))
            return
        try:
            # This can throw on invalid base64
            sig = base64.b64decode(str(signature.toPlainText()))
            verified = ecc.verify_message_with_address(address, sig, message)
        except Exception as e:
            verified = False
        if verified:
            self.show_message(_("Signature verified"))
        else:
            self.show_error(_("Wrong signature"))

    def sign_verify_message(self, address=''):
        """Modal dialog with message/address/signature fields and Sign/Verify buttons."""
        d = WindowModalDialog(self, _('Sign/verify Message'))
        d.setMinimumSize(610, 290)

        layout = QGridLayout(d)

        message_e = QTextEdit()
        message_e.setAcceptRichText(False)
        layout.addWidget(QLabel(_('Message')), 1, 0)
        layout.addWidget(message_e, 1, 1)
        layout.setRowStretch(2,3)

        address_e = QLineEdit()
        address_e.setText(address)
        layout.addWidget(QLabel(_('Address')), 2, 0)
        layout.addWidget(address_e, 2, 1)

        signature_e = QTextEdit()
        signature_e.setAcceptRichText(False)
        layout.addWidget(QLabel(_('Signature')), 3, 0)
        layout.addWidget(signature_e, 3, 1)
        layout.setRowStretch(3,1)

        hbox = QHBoxLayout()

        b = QPushButton(_("Sign"))
        b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e))
        hbox.addWidget(b)

        b = QPushButton(_("Verify"))
        b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e))
        hbox.addWidget(b)

        b = QPushButton(_("Close"))
        b.clicked.connect(d.accept)
        hbox.addWidget(b)
        layout.addLayout(hbox, 4, 1)
        d.exec_()

    @protected
    def do_decrypt(self, message_e, pubkey_e, encrypted_e, password):
        """Decrypt ciphertext from `encrypted_e` on a background wallet thread."""
        if self.wallet.is_watching_only():
            self.show_message(_('This is a watching-only wallet.'))
            return
        cyphertext = encrypted_e.toPlainText()
        task = partial(self.wallet.decrypt_message, pubkey_e.text(), cyphertext, password)

        def setText(text):
            try:
                message_e.setText(text.decode('utf-8'))
            except RuntimeError:
                # (message_e) wrapped C/C++ object has been deleted
                pass

        self.wallet.thread.add(task, on_success=setText)

    def do_encrypt(self, message_e, pubkey_e, encrypted_e):
        """Encrypt the message to the given public key; result into `encrypted_e`."""
        message = message_e.toPlainText()
        message = message.encode('utf-8')
        try:
            public_key = ecc.ECPubkey(bfh(pubkey_e.text()))
        except BaseException as e:
            self.logger.exception('Invalid Public key')
            self.show_warning(_('Invalid Public key'))
            return
        encrypted = public_key.encrypt_message(message)
        encrypted_e.setText(encrypted.decode('ascii'))

    def encrypt_message(self, address=''):
        """Modal dialog to encrypt/decrypt a message with a wallet public key.

        If `address` is given, its public key is pre-filled.
        """
        d = WindowModalDialog(self, _('Encrypt/decrypt Message'))
        d.setMinimumSize(610, 490)

        layout = QGridLayout(d)

        message_e = QTextEdit()
        message_e.setAcceptRichText(False)
        layout.addWidget(QLabel(_('Message')), 1, 0)
        layout.addWidget(message_e, 1, 1)
        layout.setRowStretch(2,3)

        pubkey_e = QLineEdit()
        if address:
            pubkey = self.wallet.get_public_key(address)
            pubkey_e.setText(pubkey)
        layout.addWidget(QLabel(_('Public key')), 2, 0)
        layout.addWidget(pubkey_e, 2, 1)

        encrypted_e = QTextEdit()
        encrypted_e.setAcceptRichText(False)
        layout.addWidget(QLabel(_('Encrypted')), 3, 0)
        layout.addWidget(encrypted_e, 3, 1)
        layout.setRowStretch(3,1)

        hbox = QHBoxLayout()
        b = QPushButton(_("Encrypt"))
        b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e))
        hbox.addWidget(b)

        b = QPushButton(_("Decrypt"))
        b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e))
        hbox.addWidget(b)

        b = QPushButton(_("Close"))
        b.clicked.connect(d.accept)
        hbox.addWidget(b)

        layout.addLayout(hbox, 4, 1)
        d.exec_()

    def password_dialog(self, msg=None, parent=None):
        """Prompt for the wallet password; returns the entered password or None."""
        from .password_dialog import PasswordDialog
        parent = parent or self
        d = PasswordDialog(parent, msg)
        return d.run()

    def tx_from_text(self, txt):
        """Parse raw transaction text into a Transaction; None (after an error
        popup) on failure."""
        from electrum.transaction import tx_from_str
        try:
            tx = tx_from_str(txt)
            return Transaction(tx)
        except BaseException as e:
            self.show_critical(_("Electrum was unable to parse your transaction") + ":\n" + repr(e))
            return

    def read_tx_from_qrcode(self):
        """Scan a QR code; dispatch to URI payment or offline-tx display."""
        from electrum import qrscanner
        try:
            data = qrscanner.scan_barcode(self.config.get_video_device())
        except BaseException as e:
            self.show_error(repr(e))
            return
        if not data:
            return
        # if the user scanned a bitcoin URI
        if str(data).startswith("bitcoin:"):
            self.pay_to_URI(data)
            return
        # else if the user scanned an offline signed tx
        try:
            data = bh2u(bitcoin.base_decode(data, length=None, base=43))
        except BaseException as e:
            self.show_error((_('Could not decode QR code')+':\n{}').format(repr(e)))
            return
        tx = self.tx_from_text(data)
        if not tx:
            return
        self.show_transaction(tx)

    def read_tx_from_file(self):
        """Pick a .txn file and parse it; returns a Transaction or None."""
        fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
        if not fileName:
            return
        try:
            with open(fileName, "r") as f:
                file_content = f.read()
        except (ValueError, IOError, os.error) as reason:
            self.show_critical(_("Electrum was unable to open your transaction file") + "\n" + str(reason), title=_("Unable to read file or no transaction found"))
            return
        return self.tx_from_text(file_content)

    def do_process_from_text(self):
        text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
        if not text:
            return
        tx = self.tx_from_text(text)
        if tx:
            self.show_transaction(tx)

    def do_process_from_file(self):
        tx = self.read_tx_from_file()
        if tx:
            self.show_transaction(tx)

    def do_process_from_txid(self):
        """Fetch a transaction by txid from the network and display it."""
        from electrum import transaction
        txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':')
        if ok and txid:
            txid = str(txid).strip()
            try:
                raw_tx = self.network.run_from_another_thread(
                    self.network.get_transaction(txid, timeout=10))
            except Exception as e:
                self.show_message(_("Error getting transaction from network") + ":\n" + repr(e))
                return
            tx = transaction.Transaction(raw_tx)
            self.show_transaction(tx)

    @protected
    def export_privkeys_dialog(self, password):
        """Export all private keys to a file.

        Keys are derived on a background thread (one per 0.1s tick) while the
        dialog shows progress via signals; closing the dialog cancels the work.
        """
        if self.wallet.is_watching_only():
            self.show_message(_("This is a watching-only wallet"))
            return

        if isinstance(self.wallet, Multisig_Wallet):
            self.show_message(_('WARNING: This is a multi-signature wallet.') + '\n' +
                              _('It cannot be "backed up" by simply exporting these private keys.'))

        d = WindowModalDialog(self, _('Private keys'))
        d.setMinimumSize(980, 300)
        vbox = QVBoxLayout(d)

        msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."),
                              _("Exposing a single private key can compromise your entire wallet!"),
                              _("In particular, DO NOT use 'redeem private key' services proposed by third parties."))
        vbox.addWidget(QLabel(msg))

        e = QTextEdit()
        e.setReadOnly(True)
        vbox.addWidget(e)

        defaultname = 'electrum-private-keys.csv'
        select_msg = _('Select file to export your private keys to')
        hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
        vbox.addLayout(hbox)

        b = OkButton(d, _('Export'))
        b.setEnabled(False)  # enabled only once all keys are computed
        vbox.addLayout(Buttons(CancelButton(d), b))

        private_keys = {}
        addresses = self.wallet.get_addresses()
        done = False
        cancelled = False
        def privkeys_thread():
            for addr in addresses:
                time.sleep(0.1)
                if done or cancelled:
                    break
                privkey = self.wallet.export_private_key(addr, password)[0]
                private_keys[addr] = privkey
                self.computing_privkeys_signal.emit()
            if not cancelled:
                self.computing_privkeys_signal.disconnect()
                self.show_privkeys_signal.emit()

        def show_privkeys():
            s = "\n".join( map( lambda x: x[0] + "\t"+ x[1], private_keys.items()))
            e.setText(s)
            b.setEnabled(True)
            self.show_privkeys_signal.disconnect()
            nonlocal done
            done = True

        def on_dialog_closed(*args):
            # disconnect only if the worker has not already finished/disconnected
            nonlocal done
            nonlocal cancelled
            if not done:
                cancelled = True
                self.computing_privkeys_signal.disconnect()
                self.show_privkeys_signal.disconnect()

        self.computing_privkeys_signal.connect(lambda: e.setText("Please wait... %d/%d"%(len(private_keys),len(addresses))))
        self.show_privkeys_signal.connect(show_privkeys)
        d.finished.connect(on_dialog_closed)
        threading.Thread(target=privkeys_thread).start()

        if not d.exec_():
            done = True
            return

        filename = filename_e.text()
        if not filename:
            return

        try:
            self.do_export_privkeys(filename, private_keys, csv_button.isChecked())
        except (IOError, os.error) as reason:
            txt = "\n".join([
                _("Electrum was unable to produce a private key-export."),
                str(reason)
            ])
            self.show_critical(txt, title=_("Unable to create csv"))

        except Exception as e:
            self.show_message(repr(e))
            return

        self.show_message(_("Private keys exported."))

    def do_export_privkeys(self, fileName, pklist, is_csv):
        """Write the {address: privkey} mapping as CSV or JSON."""
        with open(fileName, "w+") as f:
            if is_csv:
                transaction = csv.writer(f)
                transaction.writerow(["address", "private_key"])
                for addr, pk in pklist.items():
                    transaction.writerow(["%34s"%addr,pk])
            else:
                f.write(json.dumps(pklist, indent = 4))

    def do_import_labels(self):
        def import_labels(path):
            def _validate(data):
                return data  # TODO
            def import_labels_assign(data):
                for key, value in data.items():
                    self.wallet.set_label(key, value)
            import_meta(path, _validate, import_labels_assign)

        def on_import():
            self.need_update.set()
        import_meta_gui(self, _('labels'), import_labels, on_import)

    def do_export_labels(self):
        def export_labels(filename):
            export_meta(self.wallet.labels, filename)
        export_meta_gui(self, _('labels'), export_labels)

    def sweep_key_dialog(self):
        """Dialog to sweep funds from raw private keys into a wallet address."""
        d = WindowModalDialog(self, title=_('Sweep private keys'))
        d.setMinimumSize(600, 300)

        vbox = QVBoxLayout(d)

        hbox_top = QHBoxLayout()
        hbox_top.addWidget(QLabel(_("Enter private keys:")))
        hbox_top.addWidget(InfoButton(WIF_HELP_TEXT), alignment=Qt.AlignRight)
        vbox.addLayout(hbox_top)

        keys_e = ScanQRTextEdit(allow_multi=True)
        keys_e.setTabChangesFocus(True)
        vbox.addWidget(keys_e)

        addresses = self.wallet.get_unused_addresses()
        if not addresses:
            try:
                addresses = self.wallet.get_receiving_addresses()
            except AttributeError:
                addresses = self.wallet.get_addresses()
        h, address_e = address_field(addresses)
        vbox.addLayout(h)

        vbox.addStretch(1)
        button = OkButton(d, _('Sweep'))
        vbox.addLayout(Buttons(CancelButton(d), button))
        button.setEnabled(False)

        def get_address():
            addr = str(address_e.text()).strip()
            if bitcoin.is_address(addr):
                return addr

        def get_pk(*, raise_on_error=False):
            text = str(keys_e.toPlainText())
            return keystore.get_private_keys(text, raise_on_error=raise_on_error)

        def on_edit():
            valid_privkeys = False
            try:
                valid_privkeys = get_pk(raise_on_error=True) is not None
            except Exception as e:
                button.setToolTip(f'{_("Error")}: {repr(e)}')
            else:
                button.setToolTip('')
            button.setEnabled(get_address() is not None and valid_privkeys)
        on_address = lambda text: address_e.setStyleSheet((ColorScheme.DEFAULT if get_address() else ColorScheme.RED).as_stylesheet())
        keys_e.textChanged.connect(on_edit)
        address_e.textChanged.connect(on_edit)
        address_e.textChanged.connect(on_address)
        on_address(str(address_e.text()))
        if not d.exec_():
            return
        # user pressed "sweep"
        addr = get_address()
        try:
            self.wallet.check_address(addr)
        except InternalAddressCorruption as e:
            self.show_error(str(e))
            raise
        try:
            coins, keypairs = sweep_preparations(get_pk(), self.network)
        except Exception as e:  # FIXME too broad...
            self.show_message(repr(e))
            return
        self.do_clear()
        self.tx_external_keypairs = keypairs
        self.spend_coins(coins)
        self.payto_e.setText(addr)
        self.spend_max()
        self.payto_e.setFrozen(True)
        self.amount_e.setFrozen(True)
        self.warn_if_watching_only()

    def _do_import(self, title, header_layout, func):
        """Generic import flow: prompt for whitespace-separated keys, run
        `func(keys)` -> (good_inputs, bad_inputs), and report both lists."""
        text = text_dialog(self, title, header_layout, _('Import'), allow_multi=True)
        if not text:
            return
        keys = str(text).split()
        good_inputs, bad_inputs = func(keys)
        if good_inputs:
            msg = '\n'.join(good_inputs[:10])
            if len(good_inputs) > 10: msg += '\n...'
            self.show_message(_("The following addresses were added")
                              + f' ({len(good_inputs)}):\n' + msg)
        if bad_inputs:
            msg = "\n".join(f"{key[:10]}... ({msg})" for key, msg in bad_inputs[:10])
            if len(bad_inputs) > 10: msg += '\n...'
            self.show_error(_("The following inputs could not be imported")
                            + f' ({len(bad_inputs)}):\n' + msg)
        self.address_list.update()
        self.history_list.update()

    def import_addresses(self):
        if not self.wallet.can_import_address():
            return
        title, msg = _('Import addresses'), _("Enter addresses")+':'
        self._do_import(title, msg, self.wallet.import_addresses)

    @protected
    def do_import_privkey(self, password):
        if not self.wallet.can_import_privkey():
            return
        title = _('Import private keys')
        header_layout = QHBoxLayout()
        header_layout.addWidget(QLabel(_("Enter private keys")+':'))
        header_layout.addWidget(InfoButton(WIF_HELP_TEXT), alignment=Qt.AlignRight)
        self._do_import(title, header_layout, lambda x: self.wallet.import_private_keys(x, password))

    def update_fiat(self):
        # show/hide fiat amount fields depending on exchange-rate plugin state
        b = self.fx and self.fx.is_enabled()
        self.fiat_send_e.setVisible(b)
        self.fiat_receive_e.setVisible(b)
        self.history_list.update()
        self.address_list.refresh_headers()
        self.address_list.update()
        self.update_status()

    def settings_dialog(self):
        """Preferences dialog; widgets are collected per-tab in the lists below."""
        self.need_restart = False
        d = WindowModalDialog(self, _('Preferences'))
        vbox = QVBoxLayout()
        tabs = QTabWidget()
        gui_widgets = []
        fee_widgets = []
        tx_widgets = []
        id_widgets = []

        # language
        lang_help = _('Select which language is used in the GUI (after restart).')
        lang_label = HelpLabel(_('Language') + ':', lang_help)
        lang_combo = QComboBox()
        from electrum.i18n import languages
        lang_combo.addItems(list(languages.values()))
        lang_keys = list(languages.keys())
        lang_cur_setting = self.config.get("language", '')
        try:
            index = lang_keys.index(lang_cur_setting)
        except ValueError:  # not in list
            index = 0
        lang_combo.setCurrentIndex(index)
        if not self.config.is_modifiable('language'):
            for w in [lang_combo, lang_label]: w.setEnabled(False)
        def on_lang(x):
            lang_request = list(languages.keys())[lang_combo.currentIndex()]
            if lang_request != self.config.get('language'):
                self.config.set_key("language", lang_request, True)
                self.need_restart = True
        lang_combo.currentIndexChanged.connect(on_lang)
        gui_widgets.append((lang_label, lang_combo))

        nz_help = _('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"')
        nz_label = HelpLabel(_('Zeros after decimal point') + ':', nz_help)
        nz = QSpinBox()
        nz.setMinimum(0)
        nz.setMaximum(self.decimal_point)
        nz.setValue(self.num_zeros)
        if not self.config.is_modifiable('num_zeros'):
            for w in [nz, nz_label]: w.setEnabled(False)
        def on_nz():
            value = nz.value()
            if self.num_zeros != value:
                self.num_zeros = value
                self.config.set_key('num_zeros', value, True)
                self.history_list.update()
                self.address_list.update()
        nz.valueChanged.connect(on_nz)
        gui_widgets.append((nz_label, nz))

        msg = '\n'.join([
            _('Time based: fee rate is based on average confirmation time estimates'),
            _('Mempool based: fee rate is targeting a depth in the memory pool')
            ]
        )
        fee_type_label = HelpLabel(_('Fee estimation') + ':', msg)
        fee_type_combo = QComboBox()
        fee_type_combo.addItems([_('Static'), _('ETA'), _('Mempool')])
        fee_type_combo.setCurrentIndex((2 if self.config.use_mempool_fees() else 1) if self.config.is_dynfee() else 0)
        def on_fee_type(x):
            # index 0 = static, 1 = ETA (dynamic), 2 = mempool (dynamic)
            self.config.set_key('mempool_fees', x==2)
            self.config.set_key('dynamic_fees', x>0)
self.fee_slider.update()NEWLINE fee_type_combo.currentIndexChanged.connect(on_fee_type)NEWLINE fee_widgets.append((fee_type_label, fee_type_combo))NEWLINENEWLINE feebox_cb = QCheckBox(_('Edit fees manually'))NEWLINE feebox_cb.setChecked(self.config.get('show_fee', False))NEWLINE feebox_cb.setToolTip(_("Show fee edit box in send tab."))NEWLINE def on_feebox(x):NEWLINE self.config.set_key('show_fee', x == Qt.Checked)NEWLINE self.fee_adv_controls.setVisible(bool(x))NEWLINE feebox_cb.stateChanged.connect(on_feebox)NEWLINE fee_widgets.append((feebox_cb, None))NEWLINENEWLINE use_rbf = self.config.get('use_rbf', True)NEWLINE use_rbf_cb = QCheckBox(_('Use Replace-By-Fee'))NEWLINE use_rbf_cb.setChecked(use_rbf)NEWLINE use_rbf_cb.setToolTip(NEWLINE _('If you check this box, your transactions will be marked as non-final,') + '\n' + \NEWLINE _('and you will have the possibility, while they are unconfirmed, to replace them with transactions that pay higher fees.') + '\n' + \NEWLINE _('Note that some merchants do not accept non-final transactions until they are confirmed.'))NEWLINE def on_use_rbf(x):NEWLINE self.config.set_key('use_rbf', bool(x))NEWLINE batch_rbf_cb.setEnabled(bool(x))NEWLINE use_rbf_cb.stateChanged.connect(on_use_rbf)NEWLINE fee_widgets.append((use_rbf_cb, None))NEWLINENEWLINE batch_rbf_cb = QCheckBox(_('Batch RBF transactions'))NEWLINE batch_rbf_cb.setChecked(self.config.get('batch_rbf', False))NEWLINE batch_rbf_cb.setEnabled(use_rbf)NEWLINE batch_rbf_cb.setToolTip(NEWLINE _('If you check this box, your unconfirmed transactions will be consolidated into a single transaction.') + '\n' + \NEWLINE _('This will save fees.'))NEWLINE def on_batch_rbf(x):NEWLINE self.config.set_key('batch_rbf', bool(x))NEWLINE batch_rbf_cb.stateChanged.connect(on_batch_rbf)NEWLINE fee_widgets.append((batch_rbf_cb, None))NEWLINENEWLINE msg = _('OpenAlias record, used to receive coins and to sign payment requests.') + '\n\n'\NEWLINE + _('The following alias providers are available:') + 
'\n'\NEWLINE + '\n'.join(['https://cryptoname.co/', 'http://xmr.link']) + '\n\n'\NEWLINE + 'For more information, see https://openalias.org'NEWLINE alias_label = HelpLabel(_('OpenAlias') + ':', msg)NEWLINE alias = self.config.get('alias','')NEWLINE alias_e = QLineEdit(alias)NEWLINE def set_alias_color():NEWLINE if not self.config.get('alias'):NEWLINE alias_e.setStyleSheet("")NEWLINE returnNEWLINE if self.alias_info:NEWLINE alias_addr, alias_name, validated = self.alias_infoNEWLINE alias_e.setStyleSheet((ColorScheme.GREEN if validated else ColorScheme.RED).as_stylesheet(True))NEWLINE else:NEWLINE alias_e.setStyleSheet(ColorScheme.RED.as_stylesheet(True))NEWLINE def on_alias_edit():NEWLINE alias_e.setStyleSheet("")NEWLINE alias = str(alias_e.text())NEWLINE self.config.set_key('alias', alias, True)NEWLINE if alias:NEWLINE self.fetch_alias()NEWLINE set_alias_color()NEWLINE self.alias_received_signal.connect(set_alias_color)NEWLINE alias_e.editingFinished.connect(on_alias_edit)NEWLINE id_widgets.append((alias_label, alias_e))NEWLINENEWLINE # SSL certificateNEWLINE msg = ' '.join([NEWLINE _('SSL certificate used to sign payment requests.'),NEWLINE _('Use setconfig to set ssl_chain and ssl_privkey.'),NEWLINE ])NEWLINE if self.config.get('ssl_privkey') or self.config.get('ssl_chain'):NEWLINE try:NEWLINE SSL_identity = paymentrequest.check_ssl_config(self.config)NEWLINE SSL_error = NoneNEWLINE except BaseException as e:NEWLINE SSL_identity = "error"NEWLINE SSL_error = repr(e)NEWLINE else:NEWLINE SSL_identity = ""NEWLINE SSL_error = NoneNEWLINE SSL_id_label = HelpLabel(_('SSL certificate') + ':', msg)NEWLINE SSL_id_e = QLineEdit(SSL_identity)NEWLINE SSL_id_e.setStyleSheet((ColorScheme.RED if SSL_error else ColorScheme.GREEN).as_stylesheet(True) if SSL_identity else '')NEWLINE if SSL_error:NEWLINE SSL_id_e.setToolTip(SSL_error)NEWLINE SSL_id_e.setReadOnly(True)NEWLINE id_widgets.append((SSL_id_label, SSL_id_e))NEWLINENEWLINE units = base_units_listNEWLINE msg = (_('Base unit 
of your wallet.')NEWLINE + '\n1 BTC = 1000 mBTC. 1 mBTC = 1000 bits. 1 bit = 100 sat.\n'NEWLINE + _('This setting affects the Send tab, and all balance related fields.'))NEWLINE unit_label = HelpLabel(_('Base unit') + ':', msg)NEWLINE unit_combo = QComboBox()NEWLINE unit_combo.addItems(units)NEWLINE unit_combo.setCurrentIndex(units.index(self.base_unit()))NEWLINE def on_unit(x, nz):NEWLINE unit_result = units[unit_combo.currentIndex()]NEWLINE if self.base_unit() == unit_result:NEWLINE returnNEWLINE edits = self.amount_e, self.fee_e, self.receive_amount_eNEWLINE amounts = [edit.get_amount() for edit in edits]NEWLINE self.decimal_point = base_unit_name_to_decimal_point(unit_result)NEWLINE self.config.set_key('decimal_point', self.decimal_point, True)NEWLINE nz.setMaximum(self.decimal_point)NEWLINE self.history_list.update()NEWLINE self.request_list.update()NEWLINE self.address_list.update()NEWLINE for edit, amount in zip(edits, amounts):NEWLINE edit.setAmount(amount)NEWLINE self.update_status()NEWLINE unit_combo.currentIndexChanged.connect(lambda x: on_unit(x, nz))NEWLINE gui_widgets.append((unit_label, unit_combo))NEWLINENEWLINE block_explorers = sorted(util.block_explorer_info().keys())NEWLINE msg = _('Choose which online block explorer to use for functions that open a web browser')NEWLINE block_ex_label = HelpLabel(_('Online Block Explorer') + ':', msg)NEWLINE block_ex_combo = QComboBox()NEWLINE block_ex_combo.addItems(block_explorers)NEWLINE block_ex_combo.setCurrentIndex(block_ex_combo.findText(util.block_explorer(self.config)))NEWLINE def on_be(x):NEWLINE be_result = block_explorers[block_ex_combo.currentIndex()]NEWLINE self.config.set_key('block_explorer', be_result, True)NEWLINE block_ex_combo.currentIndexChanged.connect(on_be)NEWLINE gui_widgets.append((block_ex_label, block_ex_combo))NEWLINENEWLINE from electrum import qrscannerNEWLINE system_cameras = qrscanner._find_system_cameras()NEWLINE qr_combo = QComboBox()NEWLINE 
qr_combo.addItem("Default","default")NEWLINE for camera, device in system_cameras.items():NEWLINE qr_combo.addItem(camera, device)NEWLINE #combo.addItem("Manually specify a device", config.get("video_device"))NEWLINE index = qr_combo.findData(self.config.get("video_device"))NEWLINE qr_combo.setCurrentIndex(index)NEWLINE msg = _("Install the zbar package to enable this.")NEWLINE qr_label = HelpLabel(_('Video Device') + ':', msg)NEWLINE qr_combo.setEnabled(qrscanner.libzbar is not None)NEWLINE on_video_device = lambda x: self.config.set_key("video_device", qr_combo.itemData(x), True)NEWLINE qr_combo.currentIndexChanged.connect(on_video_device)NEWLINE gui_widgets.append((qr_label, qr_combo))NEWLINENEWLINE colortheme_combo = QComboBox()NEWLINE colortheme_combo.addItem(_('Light'), 'default')NEWLINE colortheme_combo.addItem(_('Dark'), 'dark')NEWLINE index = colortheme_combo.findData(self.config.get('qt_gui_color_theme', 'default'))NEWLINE colortheme_combo.setCurrentIndex(index)NEWLINE colortheme_label = QLabel(_('Color theme') + ':')NEWLINE def on_colortheme(x):NEWLINE self.config.set_key('qt_gui_color_theme', colortheme_combo.itemData(x), True)NEWLINE self.need_restart = TrueNEWLINE colortheme_combo.currentIndexChanged.connect(on_colortheme)NEWLINE gui_widgets.append((colortheme_label, colortheme_combo))NEWLINENEWLINE updatecheck_cb = QCheckBox(_("Automatically check for software updates"))NEWLINE updatecheck_cb.setChecked(self.config.get('check_updates', False))NEWLINE def on_set_updatecheck(v):NEWLINE self.config.set_key('check_updates', v == Qt.Checked, save=True)NEWLINE updatecheck_cb.stateChanged.connect(on_set_updatecheck)NEWLINE gui_widgets.append((updatecheck_cb, None))NEWLINENEWLINE filelogging_cb = QCheckBox(_("Write logs to file"))NEWLINE filelogging_cb.setChecked(bool(self.config.get('log_to_file', False)))NEWLINE def on_set_filelogging(v):NEWLINE self.config.set_key('log_to_file', v == Qt.Checked, save=True)NEWLINE self.need_restart = TrueNEWLINE 
filelogging_cb.stateChanged.connect(on_set_filelogging)NEWLINE filelogging_cb.setToolTip(_('Debug logs can be persisted to disk. These are useful for troubleshooting.'))NEWLINE gui_widgets.append((filelogging_cb, None))NEWLINENEWLINE usechange_cb = QCheckBox(_('Use change addresses'))NEWLINE usechange_cb.setChecked(self.wallet.use_change)NEWLINE if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)NEWLINE def on_usechange(x):NEWLINE usechange_result = x == Qt.CheckedNEWLINE if self.wallet.use_change != usechange_result:NEWLINE self.wallet.use_change = usechange_resultNEWLINE self.wallet.storage.put('use_change', self.wallet.use_change)NEWLINE multiple_cb.setEnabled(self.wallet.use_change)NEWLINE usechange_cb.stateChanged.connect(on_usechange)NEWLINE usechange_cb.setToolTip(_('Using change addresses makes it more difficult for other people to track your transactions.'))NEWLINE tx_widgets.append((usechange_cb, None))NEWLINENEWLINE def on_multiple(x):NEWLINE multiple = x == Qt.CheckedNEWLINE if self.wallet.multiple_change != multiple:NEWLINE self.wallet.multiple_change = multipleNEWLINE self.wallet.storage.put('multiple_change', multiple)NEWLINE multiple_change = self.wallet.multiple_changeNEWLINE multiple_cb = QCheckBox(_('Use multiple change addresses'))NEWLINE multiple_cb.setEnabled(self.wallet.use_change)NEWLINE multiple_cb.setToolTip('\n'.join([NEWLINE _('In some cases, use up to 3 change addresses in order to break 'NEWLINE 'up large coin amounts and obfuscate the recipient address.'),NEWLINE _('This may result in higher transactions fees.')NEWLINE ]))NEWLINE multiple_cb.setChecked(multiple_change)NEWLINE multiple_cb.stateChanged.connect(on_multiple)NEWLINE tx_widgets.append((multiple_cb, None))NEWLINENEWLINE def fmt_docs(key, klass):NEWLINE lines = [ln.lstrip(" ") for ln in klass.__doc__.split("\n")]NEWLINE return '\n'.join([key, "", " ".join(lines)])NEWLINENEWLINE choosers = sorted(coinchooser.COIN_CHOOSERS.keys())NEWLINE if 
len(choosers) > 1:NEWLINE chooser_name = coinchooser.get_name(self.config)NEWLINE msg = _('Choose coin (UTXO) selection method. The following are available:\n\n')NEWLINE msg += '\n\n'.join(fmt_docs(*item) for item in coinchooser.COIN_CHOOSERS.items())NEWLINE chooser_label = HelpLabel(_('Coin selection') + ':', msg)NEWLINE chooser_combo = QComboBox()NEWLINE chooser_combo.addItems(choosers)NEWLINE i = choosers.index(chooser_name) if chooser_name in choosers else 0NEWLINE chooser_combo.setCurrentIndex(i)NEWLINE def on_chooser(x):NEWLINE chooser_name = choosers[chooser_combo.currentIndex()]NEWLINE self.config.set_key('coin_chooser', chooser_name)NEWLINE chooser_combo.currentIndexChanged.connect(on_chooser)NEWLINE tx_widgets.append((chooser_label, chooser_combo))NEWLINENEWLINE def on_unconf(x):NEWLINE self.config.set_key('confirmed_only', bool(x))NEWLINE conf_only = self.config.get('confirmed_only', False)NEWLINE unconf_cb = QCheckBox(_('Spend only confirmed coins'))NEWLINE unconf_cb.setToolTip(_('Spend only confirmed inputs.'))NEWLINE unconf_cb.setChecked(conf_only)NEWLINE unconf_cb.stateChanged.connect(on_unconf)NEWLINE tx_widgets.append((unconf_cb, None))NEWLINENEWLINE def on_outrounding(x):NEWLINE self.config.set_key('coin_chooser_output_rounding', bool(x))NEWLINE enable_outrounding = self.config.get('coin_chooser_output_rounding', False)NEWLINE outrounding_cb = QCheckBox(_('Enable output value rounding'))NEWLINE outrounding_cb.setToolTip(NEWLINE _('Set the value of the change output so that it has similar precision to the other outputs.') + '\n' +NEWLINE _('This might improve your privacy somewhat.') + '\n' +NEWLINE _('If enabled, at most 100 satoshis might be lost due to this, per transaction.'))NEWLINE outrounding_cb.setChecked(enable_outrounding)NEWLINE outrounding_cb.stateChanged.connect(on_outrounding)NEWLINE tx_widgets.append((outrounding_cb, None))NEWLINENEWLINE # Fiat CurrencyNEWLINE hist_checkbox = QCheckBox()NEWLINE hist_capgains_checkbox = 
QCheckBox()NEWLINE fiat_address_checkbox = QCheckBox()NEWLINE ccy_combo = QComboBox()NEWLINE ex_combo = QComboBox()NEWLINENEWLINE def update_currencies():NEWLINE if not self.fx: returnNEWLINE currencies = sorted(self.fx.get_currencies(self.fx.get_history_config()))NEWLINE ccy_combo.clear()NEWLINE ccy_combo.addItems([_('None')] + currencies)NEWLINE if self.fx.is_enabled():NEWLINE ccy_combo.setCurrentIndex(ccy_combo.findText(self.fx.get_currency()))NEWLINENEWLINE def update_history_cb():NEWLINE if not self.fx: returnNEWLINE hist_checkbox.setChecked(self.fx.get_history_config())NEWLINE hist_checkbox.setEnabled(self.fx.is_enabled())NEWLINENEWLINE def update_fiat_address_cb():NEWLINE if not self.fx: returnNEWLINE fiat_address_checkbox.setChecked(self.fx.get_fiat_address_config())NEWLINENEWLINE def update_history_capgains_cb():NEWLINE if not self.fx: returnNEWLINE hist_capgains_checkbox.setChecked(self.fx.get_history_capital_gains_config())NEWLINE hist_capgains_checkbox.setEnabled(hist_checkbox.isChecked())NEWLINENEWLINE def update_exchanges():NEWLINE if not self.fx: returnNEWLINE b = self.fx.is_enabled()NEWLINE ex_combo.setEnabled(b)NEWLINE if b:NEWLINE h = self.fx.get_history_config()NEWLINE c = self.fx.get_currency()NEWLINE exchanges = self.fx.get_exchanges_by_ccy(c, h)NEWLINE else:NEWLINE exchanges = self.fx.get_exchanges_by_ccy('USD', False)NEWLINE ex_combo.blockSignals(True)NEWLINE ex_combo.clear()NEWLINE ex_combo.addItems(sorted(exchanges))NEWLINE ex_combo.setCurrentIndex(ex_combo.findText(self.fx.config_exchange()))NEWLINE ex_combo.blockSignals(False)NEWLINENEWLINE def on_currency(hh):NEWLINE if not self.fx: returnNEWLINE b = bool(ccy_combo.currentIndex())NEWLINE ccy = str(ccy_combo.currentText()) if b else NoneNEWLINE self.fx.set_enabled(b)NEWLINE if b and ccy != self.fx.ccy:NEWLINE self.fx.set_currency(ccy)NEWLINE update_history_cb()NEWLINE update_exchanges()NEWLINE self.update_fiat()NEWLINENEWLINE def on_exchange(idx):NEWLINE exchange = 
str(ex_combo.currentText())NEWLINE if self.fx and self.fx.is_enabled() and exchange and exchange != self.fx.exchange.name():NEWLINE self.fx.set_exchange(exchange)NEWLINENEWLINE def on_history(checked):NEWLINE if not self.fx: returnNEWLINE self.fx.set_history_config(checked)NEWLINE update_exchanges()NEWLINE self.history_model.refresh('on_history')NEWLINE if self.fx.is_enabled() and checked:NEWLINE self.fx.trigger_update()NEWLINE update_history_capgains_cb()NEWLINENEWLINE def on_history_capgains(checked):NEWLINE if not self.fx: returnNEWLINE self.fx.set_history_capital_gains_config(checked)NEWLINE self.history_model.refresh('on_history_capgains')NEWLINENEWLINE def on_fiat_address(checked):NEWLINE if not self.fx: returnNEWLINE self.fx.set_fiat_address_config(checked)NEWLINE self.address_list.refresh_headers()NEWLINE self.address_list.update()NEWLINENEWLINE update_currencies()NEWLINE update_history_cb()NEWLINE update_history_capgains_cb()NEWLINE update_fiat_address_cb()NEWLINE update_exchanges()NEWLINE ccy_combo.currentIndexChanged.connect(on_currency)NEWLINE hist_checkbox.stateChanged.connect(on_history)NEWLINE hist_capgains_checkbox.stateChanged.connect(on_history_capgains)NEWLINE fiat_address_checkbox.stateChanged.connect(on_fiat_address)NEWLINE ex_combo.currentIndexChanged.connect(on_exchange)NEWLINENEWLINE fiat_widgets = []NEWLINE fiat_widgets.append((QLabel(_('Fiat currency')), ccy_combo))NEWLINE fiat_widgets.append((QLabel(_('Show history rates')), hist_checkbox))NEWLINE fiat_widgets.append((QLabel(_('Show capital gains in history')), hist_capgains_checkbox))NEWLINE fiat_widgets.append((QLabel(_('Show Fiat balance for addresses')), fiat_address_checkbox))NEWLINE fiat_widgets.append((QLabel(_('Source')), ex_combo))NEWLINENEWLINE tabs_info = [NEWLINE (fee_widgets, _('Fees')),NEWLINE (tx_widgets, _('Transactions')),NEWLINE (gui_widgets, _('General')),NEWLINE (fiat_widgets, _('Fiat')),NEWLINE (id_widgets, _('Identity')),NEWLINE ]NEWLINE for widgets, name in 
tabs_info:NEWLINE tab = QWidget()NEWLINE grid = QGridLayout(tab)NEWLINE grid.setColumnStretch(0,1)NEWLINE for a,b in widgets:NEWLINE i = grid.rowCount()NEWLINE if b:NEWLINE if a:NEWLINE grid.addWidget(a, i, 0)NEWLINE grid.addWidget(b, i, 1)NEWLINE else:NEWLINE grid.addWidget(a, i, 0, 1, 2)NEWLINE tabs.addTab(tab, name)NEWLINENEWLINE vbox.addWidget(tabs)NEWLINE vbox.addStretch(1)NEWLINE vbox.addLayout(Buttons(CloseButton(d)))NEWLINE d.setLayout(vbox)NEWLINENEWLINE # run the dialogNEWLINE d.exec_()NEWLINENEWLINE if self.fx:NEWLINE self.fx.trigger_update()NEWLINENEWLINE self.alias_received_signal.disconnect(set_alias_color)NEWLINENEWLINE run_hook('close_settings_dialog')NEWLINE if self.need_restart:NEWLINE self.show_warning(_('Please restart Electrum to activate the new GUI settings'), title=_('Success'))NEWLINENEWLINENEWLINE def closeEvent(self, event):NEWLINE # It seems in some rare cases this closeEvent() is called twiceNEWLINE if not self.cleaned_up:NEWLINE self.cleaned_up = TrueNEWLINE self.clean_up()NEWLINE event.accept()NEWLINENEWLINE def clean_up(self):NEWLINE self.wallet.thread.stop()NEWLINE if self.network:NEWLINE self.network.unregister_callback(self.on_network)NEWLINE self.network.unregister_callback(self.on_quotes)NEWLINE self.network.unregister_callback(self.on_history)NEWLINE self.config.set_key("is_maximized", self.isMaximized())NEWLINE if not self.isMaximized():NEWLINE g = self.geometry()NEWLINE self.wallet.storage.put("winpos-qt", [g.left(),g.top(),NEWLINE g.width(),g.height()])NEWLINE self.config.set_key("console-history", self.console.history[-50:],NEWLINE True)NEWLINE if self.qr_window:NEWLINE self.qr_window.close()NEWLINE self.close_wallet()NEWLINENEWLINE self.gui_object.timer.timeout.disconnect(self.timer_actions)NEWLINE self.gui_object.close_window(self)NEWLINENEWLINE def plugins_dialog(self):NEWLINE self.pluginsdialog = d = WindowModalDialog(self, _('Electrum Plugins'))NEWLINENEWLINE plugins = self.gui_object.pluginsNEWLINENEWLINE vbox = 
QVBoxLayout(d)NEWLINENEWLINE # pluginsNEWLINE scroll = QScrollArea()NEWLINE scroll.setEnabled(True)NEWLINE scroll.setWidgetResizable(True)NEWLINE scroll.setMinimumSize(400,250)NEWLINE vbox.addWidget(scroll)NEWLINENEWLINE w = QWidget()NEWLINE scroll.setWidget(w)NEWLINE w.setMinimumHeight(plugins.count() * 35)NEWLINENEWLINE grid = QGridLayout()NEWLINE grid.setColumnStretch(0,1)NEWLINE w.setLayout(grid)NEWLINENEWLINE settings_widgets = {}NEWLINENEWLINE def enable_settings_widget(p, name, i):NEWLINE widget = settings_widgets.get(name)NEWLINE if not widget and p and p.requires_settings():NEWLINE widget = settings_widgets[name] = p.settings_widget(d)NEWLINE grid.addWidget(widget, i, 1)NEWLINE if widget:NEWLINE widget.setEnabled(bool(p and p.is_enabled()))NEWLINENEWLINE def do_toggle(cb, name, i):NEWLINE p = plugins.toggle(name)NEWLINE cb.setChecked(bool(p))NEWLINE enable_settings_widget(p, name, i)NEWLINE run_hook('init_qt', self.gui_object)NEWLINENEWLINE for i, descr in enumerate(plugins.descriptions.values()):NEWLINE full_name = descr['__name__']NEWLINE prefix, _separator, name = full_name.rpartition('.')NEWLINE p = plugins.get(name)NEWLINE if descr.get('registers_keystore'):NEWLINE continueNEWLINE try:NEWLINE cb = QCheckBox(descr['fullname'])NEWLINE plugin_is_loaded = p is not NoneNEWLINE cb_enabled = (not plugin_is_loaded and plugins.is_available(name, self.wallet)NEWLINE or plugin_is_loaded and p.can_user_disable())NEWLINE cb.setEnabled(cb_enabled)NEWLINE cb.setChecked(plugin_is_loaded and p.is_enabled())NEWLINE grid.addWidget(cb, i, 0)NEWLINE enable_settings_widget(p, name, i)NEWLINE cb.clicked.connect(partial(do_toggle, cb, name, i))NEWLINE msg = descr['description']NEWLINE if descr.get('requires'):NEWLINE msg += '\n\n' + _('Requires') + ':\n' + '\n'.join(map(lambda x: x[1], descr.get('requires')))NEWLINE grid.addWidget(HelpButton(msg), i, 2)NEWLINE except Exception:NEWLINE self.logger.exception(f"cannot display plugin {name}")NEWLINE 
grid.setRowStretch(len(plugins.descriptions.values()), 1)NEWLINE vbox.addLayout(Buttons(CloseButton(d)))NEWLINE d.exec_()NEWLINENEWLINE def cpfp(self, parent_tx, new_tx):NEWLINE total_size = parent_tx.estimated_size() + new_tx.estimated_size()NEWLINE parent_fee = self.wallet.get_tx_fee(parent_tx)NEWLINE if parent_fee is None:NEWLINE self.show_error(_("Can't CPFP: unknown fee for parent transaction."))NEWLINE returnNEWLINE d = WindowModalDialog(self, _('Child Pays for Parent'))NEWLINE vbox = QVBoxLayout(d)NEWLINE msg = (NEWLINE "A CPFP is a transaction that sends an unconfirmed output back to "NEWLINE "yourself, with a high fee. The goal is to have miners confirm "NEWLINE "the parent transaction in order to get the fee attached to the "NEWLINE "child transaction.")NEWLINE vbox.addWidget(WWLabel(_(msg)))NEWLINE msg2 = ("The proposed fee is computed using your "NEWLINE "fee/kB settings, applied to the total size of both child and "NEWLINE "parent transactions. After you broadcast a CPFP transaction, "NEWLINE "it is normal to see a new unconfirmed transaction in your history.")NEWLINE vbox.addWidget(WWLabel(_(msg2)))NEWLINE grid = QGridLayout()NEWLINE grid.addWidget(QLabel(_('Total size') + ':'), 0, 0)NEWLINE grid.addWidget(QLabel('%d bytes'% total_size), 0, 1)NEWLINE max_fee = new_tx.output_value()NEWLINE grid.addWidget(QLabel(_('Input amount') + ':'), 1, 0)NEWLINE grid.addWidget(QLabel(self.format_amount(max_fee) + ' ' + self.base_unit()), 1, 1)NEWLINE output_amount = QLabel('')NEWLINE grid.addWidget(QLabel(_('Output amount') + ':'), 2, 0)NEWLINE grid.addWidget(output_amount, 2, 1)NEWLINE fee_e = BTCAmountEdit(self.get_decimal_point)NEWLINE # FIXME with dyn fees, without estimates, there are all kinds of crashes hereNEWLINE combined_fee = QLabel('')NEWLINE combined_feerate = QLabel('')NEWLINE def on_fee_edit(x):NEWLINE out_amt = max_fee - fee_e.get_amount()NEWLINE out_amt_str = (self.format_amount(out_amt) + ' ' + self.base_unit()) if out_amt else ''NEWLINE 
output_amount.setText(out_amt_str)NEWLINE comb_fee = parent_fee + fee_e.get_amount()NEWLINE comb_fee_str = (self.format_amount(comb_fee) + ' ' + self.base_unit()) if comb_fee else ''NEWLINE combined_fee.setText(comb_fee_str)NEWLINE comb_feerate = comb_fee / total_size * 1000NEWLINE comb_feerate_str = self.format_fee_rate(comb_feerate) if comb_feerate else ''NEWLINE combined_feerate.setText(comb_feerate_str)NEWLINE fee_e.textChanged.connect(on_fee_edit)NEWLINE def get_child_fee_from_total_feerate(fee_per_kb):NEWLINE fee = fee_per_kb * total_size / 1000 - parent_feeNEWLINE fee = min(max_fee, fee)NEWLINE fee = max(total_size, fee) # pay at least 1 sat/byte for combined sizeNEWLINE return feeNEWLINE suggested_feerate = self.config.fee_per_kb()NEWLINE if suggested_feerate is None:NEWLINE self.show_error(f'''{_("Can't CPFP'")}: {_('Dynamic fee estimates not available')}''')NEWLINE returnNEWLINE fee = get_child_fee_from_total_feerate(suggested_feerate)NEWLINE fee_e.setAmount(fee)NEWLINE grid.addWidget(QLabel(_('Fee for child') + ':'), 3, 0)NEWLINE grid.addWidget(fee_e, 3, 1)NEWLINE def on_rate(dyn, pos, fee_rate):NEWLINE fee = get_child_fee_from_total_feerate(fee_rate)NEWLINE fee_e.setAmount(fee)NEWLINE fee_slider = FeeSlider(self, self.config, on_rate)NEWLINE fee_slider.update()NEWLINE grid.addWidget(fee_slider, 4, 1)NEWLINE grid.addWidget(QLabel(_('Total fee') + ':'), 5, 0)NEWLINE grid.addWidget(combined_fee, 5, 1)NEWLINE grid.addWidget(QLabel(_('Total feerate') + ':'), 6, 0)NEWLINE grid.addWidget(combined_feerate, 6, 1)NEWLINE vbox.addLayout(grid)NEWLINE vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))NEWLINE if not d.exec_():NEWLINE returnNEWLINE fee = fee_e.get_amount()NEWLINE if fee > max_fee:NEWLINE self.show_error(_('Max fee exceeded'))NEWLINE returnNEWLINE new_tx = self.wallet.cpfp(parent_tx, fee)NEWLINE new_tx.set_rbf(True)NEWLINE self.show_transaction(new_tx)NEWLINENEWLINE def bump_fee_dialog(self, tx):NEWLINE fee = self.wallet.get_tx_fee(tx)NEWLINE if fee 
is None:NEWLINE self.show_error(_("Can't bump fee: unknown fee for original transaction."))NEWLINE returnNEWLINE tx_label = self.wallet.get_label(tx.txid())NEWLINE tx_size = tx.estimated_size()NEWLINE old_fee_rate = fee / tx_size # sat/vbyteNEWLINE d = WindowModalDialog(self, _('Bump Fee'))NEWLINE vbox = QVBoxLayout(d)NEWLINE vbox.addWidget(WWLabel(_("Increase your transaction's fee to improve its position in mempool.")))NEWLINE vbox.addWidget(QLabel(_('Current Fee') + ': %s'% self.format_amount(fee) + ' ' + self.base_unit()))NEWLINE vbox.addWidget(QLabel(_('Current Fee rate') + ': %s' % self.format_fee_rate(1000 * old_fee_rate)))NEWLINE vbox.addWidget(QLabel(_('New Fee rate') + ':'))NEWLINENEWLINE def on_textedit_rate():NEWLINE fee_slider.deactivate()NEWLINE feerate_e = FeerateEdit(lambda: 0)NEWLINE feerate_e.setAmount(max(old_fee_rate * 1.5, old_fee_rate + 1))NEWLINE feerate_e.textEdited.connect(on_textedit_rate)NEWLINE vbox.addWidget(feerate_e)NEWLINENEWLINE def on_slider_rate(dyn, pos, fee_rate):NEWLINE fee_slider.activate()NEWLINE if fee_rate is not None:NEWLINE feerate_e.setAmount(fee_rate / 1000)NEWLINE fee_slider = FeeSlider(self, self.config, on_slider_rate)NEWLINE fee_slider.deactivate()NEWLINE vbox.addWidget(fee_slider)NEWLINE cb = QCheckBox(_('Final'))NEWLINE vbox.addWidget(cb)NEWLINE vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))NEWLINE if not d.exec_():NEWLINE returnNEWLINE is_final = cb.isChecked()NEWLINE new_fee_rate = feerate_e.get_amount()NEWLINE try:NEWLINE new_tx = self.wallet.bump_fee(tx=tx, new_fee_rate=new_fee_rate, config=self.config)NEWLINE except CannotBumpFee as e:NEWLINE self.show_error(str(e))NEWLINE returnNEWLINE if is_final:NEWLINE new_tx.set_rbf(False)NEWLINE self.show_transaction(new_tx, tx_label)NEWLINENEWLINE def save_transaction_into_wallet(self, tx):NEWLINE win = self.top_level_window()NEWLINE try:NEWLINE if not self.wallet.add_transaction(tx.txid(), tx):NEWLINE win.show_error(_("Transaction could not be saved.") + "\n" 
+NEWLINE _("It conflicts with current history."))NEWLINE return FalseNEWLINE except AddTransactionException as e:NEWLINE win.show_error(e)NEWLINE return FalseNEWLINE else:NEWLINE self.wallet.storage.write()NEWLINE # need to update at least: history_list, utxo_list, address_listNEWLINE self.need_update.set()NEWLINE msg = (_("Transaction added to wallet history.") + '\n\n' +NEWLINE _("Note: this is an offline transaction, if you want the network "NEWLINE "to see it, you need to broadcast it."))NEWLINE win.msg_box(QPixmap(icon_path("offline_tx.png")), None, _('Success'), msg)NEWLINE return TrueNEWLINE
from __future__ import annotationsNEWLINENEWLINEimport subprocessNEWLINEimport sysNEWLINEfrom urllib.parse import ParseResultNEWLINENEWLINEimport requestsNEWLINENEWLINEimport murfeyNEWLINENEWLINENEWLINEdef check(api_base: ParseResult, install: bool = True, force: bool = False):NEWLINE """NEWLINE Verify that the current client version can run against the selected server.NEWLINE If the version number is outside the allowed range then this can triggerNEWLINE an update on the client, and in that case will terminate the process.NEWLINE """NEWLINE version_check_url = api_base._replace(NEWLINE path="/version", query=f"client_version={murfey.__version__}"NEWLINE )NEWLINE server_reply = requests.get(version_check_url.geturl())NEWLINE if server_reply.status_code != 200:NEWLINE raise ValueError(f"Server unreachable ({server_reply.status_code})")NEWLINE versions = server_reply.json()NEWLINE if not install:NEWLINE returnNEWLINE print(NEWLINE f"Murfey {murfey.__version__} connected to Murfey server {versions['server']}"NEWLINE )NEWLINE if versions["client-needs-update"] or versions["client-needs-downgrade"]:NEWLINE # Proceed with mandatory installationNEWLINE if versions["client-needs-update"]:NEWLINE print("This version of Murfey must be updated before continuing.")NEWLINE if versions["client-needs-downgrade"]:NEWLINE print(NEWLINE "This version of Murfey is too new for the server and must be downgraded before continuing."NEWLINE )NEWLINE result = install_murfey(api_base, versions["server"])NEWLINE if result:NEWLINE print("\nMurfey has been updated. Please restart Murfey")NEWLINE exit()NEWLINE else:NEWLINE exit("Error occurred while updating Murfey")NEWLINENEWLINE if versions["server"] != murfey.__version__:NEWLINE if force:NEWLINE result = install_murfey(api_base, versions["server"])NEWLINE if result:NEWLINE print("\nMurfey has been updated. 
Please restart Murfey")NEWLINE exit()NEWLINE else:NEWLINE exit("Error occurred while updating Murfey")NEWLINE else:NEWLINE print("An update is available, install with 'murfey update'.")NEWLINENEWLINENEWLINEdef install_murfey(api_base: ParseResult, version: str) -> bool:NEWLINE """Install a specific version of the Murfey client.NEWLINE Return 'true' on success and 'false' on error."""NEWLINENEWLINE assert api_base.hostname is not NoneNEWLINE result = subprocess.run(NEWLINE [NEWLINE sys.executable,NEWLINE "-mpip",NEWLINE "install",NEWLINE "--trusted-host",NEWLINE api_base.hostname,NEWLINE "-i",NEWLINE api_base._replace(path="/pypi", query="").geturl(),NEWLINE f"murfey[client]=={version}",NEWLINE ]NEWLINE )NEWLINE return result.returncode == 0NEWLINE
# URLconf for the neighbourhood app: home page, create views for news /
# businesses / profiles, detail pages, and two search endpoints.
from django.conf.urls import url
from . import views
from django.conf.urls.static import static
from django.conf import settings

urlpatterns=[
    url('^$',views.home,name='home'),
    url(r'^add/news$',views.add_news,name='addnews'),
    url(r'^add/business$',views.add_business,name='addbusiness'),
    url(r'^create/profile$',views.createprofile,name='createprofile'),
    # NOTE(review): these two detail patterns are not anchored with '^', so
    # they match anywhere in the path — confirm this is intentional.
    url(r'business/(\d+)',views.single_business,name='singlebusiness'),
    url(r'news/(\d+)',views.single_news,name="singlenews"),
    url(r'profile/(?P<userid>\d+)',views.single_profile,name='singleprofile'),
    url(r'^update/profile$',views.updateprofile,name='updateprofile'),
    # NOTE(review): 'searchbusiness' routes to search_neighborhood and
    # 'searchlocation' routes to search_business — the pairing looks swapped;
    # verify against the view implementations before renaming anything.
    url(r'^searchbusiness/', views.search_neighborhood, name='search_neighborhood'),
    url(r'^searchlocation/', views.search_business, name='search_business')

]

# Serve user-uploaded media through Django only in development.
if settings.DEBUG:
    urlpatterns+= static(settings.MEDIA_URL, document_root = settings.MEDIA_ROOT)
import pytest
import torch
import torch.nn.functional as F
import torchtest

from scattering_compositional_learner import ScatteringCompositionalLearner


@pytest.mark.parametrize('image_size', [80, 160])
def test_forward(image_size):
    """Smoke-test the SCL model with torchtest's standard training checks.

    Builds a random batch of 4 RAVEN-style panels (16 channels) and verifies
    one optimisation step produces no NaN/Inf outputs and updates all
    trainable variables.
    """
    panels = torch.rand(4, 16, image_size, image_size)
    labels = torch.randint(8, (4,), dtype=torch.long)
    model = ScatteringCompositionalLearner(image_size=image_size)
    adam = torch.optim.Adam(model.parameters())
    torchtest.test_suite(
        model=model,
        loss_fn=F.cross_entropy,
        optim=adam,
        batch=[panels, labels],
        test_inf_vals=True,
        test_nan_vals=True,
        test_vars_change=True,
    )
import getpass
import os.path as path
import os
import numpy as np
from scipy.stats import describe

HOST, PORT = "141.37.176.188", 22
BASE_URL = "/michael/DoD/baumer_test_prod_var/512x512_samplescale1.0_patchscale1.0/samples"
BASE_DIR = "__sftpcache__"

class SFTP():
    """Thin convenience wrapper around a paramiko SFTP session.

    Prompts interactively for missing credentials, caches downloads in
    `base_dir`, and supports use as a context manager so the transport is
    reliably closed.
    """

    def __init__(self, host=HOST, port=PORT, user=None, password=None, base_dir=BASE_DIR):
        # paramiko is imported lazily so the module can be imported without it.
        import paramiko
        # exist_ok avoids the racy exists()-then-makedirs of the original.
        os.makedirs(base_dir, exist_ok=True)

        # Open a transport
        self.transport = paramiko.Transport((host, port))
        if user is None:
            user = input("Username:")
        if password is None:
            password = getpass.getpass("Password for " + user + ":")  # Prompts for password
        self.transport.connect(username=user, password=password)
        self.sftp = paramiko.SFTPClient.from_transport(self.transport)

    def cd(self, path):
        """Change the remote working directory; returns self for chaining."""
        self.sftp.chdir(path)
        return self

    def get(self, src, dest=BASE_DIR):
        """Download `src` to `dest` and return the list of local paths.

        If `src` ends with '*', every entry of the current remote directory
        is downloaded into the directory `dest` instead.
        """
        if src[-1] == "*":
            # Directory mode: fetch every entry in the remote cwd.
            # sorted() reproduces the ordering the old dir(self) hack gave
            # (dir() sorts the names returned by __dir__).
            local_paths = []
            for name in sorted(self.sftp.listdir()):
                local_paths.append(os.path.join(dest, name))
                self.sftp.get(name, local_paths[-1])
            return local_paths
        else:
            self.sftp.get(src, dest)
            return [dest]

    def put(self, src, dest):
        """Upload local file `src` to remote path `dest`."""
        self.sftp.put(src, dest)

    def __len__(self):
        # Number of entries in the current remote directory.
        return len(self.sftp.listdir())

    def __dir__(self):
        # NOTE(review): overriding __dir__ to list *remote files* is
        # surprising, but kept for backward compatibility with callers
        # that use dir(instance).
        return self.sftp.listdir()

    def close(self):
        """Close the SFTP channel and the underlying transport."""
        self.sftp.close()
        self.transport.close()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # Always release the connection, even on error.
        self.close()
        return False
from dataclasses import dataclass, asdict
from typing import List, Optional

from bson import ObjectId
from passlib.hash import pbkdf2_sha256 as sha256
from pymongo.database import Database

from app import db
db: Database


@dataclass
class UserModel:
    """A user account persisted in the ``users`` collection."""
    username: str
    pwhash: str
    # Stringified Mongo ObjectId; None until the document is inserted.
    id: Optional[str] = None

    @staticmethod
    def hash_password(password):
        """Return a pbkdf2-sha256 hash suitable for storing as ``pwhash``."""
        return sha256.hash(password)

    @staticmethod
    def verify_password(password, hash):
        """Return True iff ``password`` matches the stored ``hash``."""
        return sha256.verify(password, hash)

    def insert(self):
        """Insert this user and record the generated id on the instance."""
        new_doc = db['users'].insert_one({
            'username': self.username,
            'pwhash': self.pwhash
        })
        self.id = str(new_doc.inserted_id)

    @classmethod
    def find_by_id(cls, userid):
        """Return the user with ``userid``, or None when not found."""
        doc = db['users'].find_one({'_id': ObjectId(userid)})
        if doc is not None:
            return cls(doc['username'], doc['pwhash'], str(doc['_id']))

    @classmethod
    def find_by_username(cls, username):
        """Return the user named ``username``, or None when not found."""
        doc = db['users'].find_one({'username': username})
        if doc is not None:
            return cls(username=doc['username'], pwhash=doc['pwhash'],
                       id=str(doc['_id']))


@dataclass
class NoteModel:
    """A markdown note persisted in the ``notes`` collection."""
    title: str
    markdown: str
    # Stringified ObjectId of the owning user (stored as ObjectId in Mongo).
    userid: str
    tags: List[str]
    # Stringified Mongo ObjectId; None until the document is inserted.
    id: Optional[str] = None

    def insert(self):
        """Insert this note and record the generated id on the instance."""
        new_doc = db['notes'].insert_one({
            'title': self.title,
            'markdown': self.markdown,
            'userid': ObjectId(self.userid),
            'tags': self.tags
        })
        self.id = str(new_doc.inserted_id)

    def update(self):
        """Persist the current field values for an existing note."""
        # update_one replaces the deprecated Collection.update().
        db['notes'].update_one({'_id': ObjectId(self.id)}, {'$set': {
            'title': self.title,
            'markdown': self.markdown,
            'userid': ObjectId(self.userid),
            'tags': self.tags
        }})

    @classmethod
    def find_by_id(cls, noteid):
        """Return the note with ``noteid``, or None when not found."""
        doc = db['notes'].find_one({'_id': ObjectId(noteid)})
        if doc is not None:
            return cls(doc['title'], doc['markdown'], str(doc['userid']),
                       doc['tags'], str(doc['_id']))

    @classmethod
    def delete_by_id(cls, noteid):
        """Delete the note with ``noteid`` (no-op when absent)."""
        db['notes'].delete_one({'_id': ObjectId(noteid)})

    @classmethod
    def find_all_by_userid(cls, userid):
        """Return every note owned by ``userid`` as a list of NoteModel."""
        docs = db['notes'].find({'userid': ObjectId(userid)})
        return [
            cls(doc['title'], doc['markdown'], str(doc['userid']),
                doc['tags'], str(doc['_id']))
            for doc in docs
        ]
import numpy as np
import pandas as pd
import tensorflow as tf
# TF 1.x: turn on eager execution so the training loop below can run
# Python-side (GradientTape, .numpy()) without building a static graph.
tf.enable_eager_execution()
from sklearn.preprocessing import StandardScaler
import pickle
import sys
sys.path.append('../../rumm')
import lang
import nets
import bayesian

# constants
BATCH_SZ = 1024

'''
zinc_df = pd.read_csv('res.csv', sep='\t')

# shuffle it, and conduct training-test split
zinc_df = zinc_df.sample(zinc_df.shape[0])
n_samples = zinc_df.shape[0]
n_tr = int(0.8 * n_samples)
y_tr = np.array(zinc_df.values[:n_tr, 2:-2], dtype=np.float32)
x_tr = zinc_df.values[:n_tr, -2]
y_te = np.array(zinc_df.values[n_tr:, 2:-2], dtype=np.float32)
x_te = zinc_df.values[n_tr:, -2]
x_tr = np.apply_along_axis(lambda x: 'G' + x + 'E', 0, x_tr)
x_te = np.apply_along_axis(lambda x: 'G' + x + 'E', 0, x_te)
fp_tr = np.array([list(line) for line in zinc_df.values[:n_tr, -1].tolist()], dtype='int')
fp_te = np.array([list(line) for line in zinc_df.values[n_tr:, -1].tolist()], dtype='int')
# calculate the std of y_tr for loss function
scaler = StandardScaler(copy=False)
y_tr = scaler.fit_transform(y_tr)
y_te = scaler.transform(y_te)
f_handle = open('scaler.p', 'wb')
pickle.dump(scaler, f_handle)
f_handle.close()

# save the dataset for later use
np.save('y_tr', y_tr)
np.save('x_tr', x_tr)
np.save('y_te', y_te)
np.save('x_te', x_te)
np.save('fp_tr', fp_tr)
np.save('fp_te', fp_te)
'''

# Load the pre-processed training arrays produced by the (disabled)
# preprocessing block above.
y_tr = np.load('y_tr.npy')
x_tr = np.load('x_tr.npy')
fp_tr = np.load('fp_tr.npy')
n_samples = y_tr.shape[0]

# Restore the character-level language object; vocab gets one extra slot
# (presumably for padding — TODO confirm against lang module).
f_handle = open('lang_obj.p', 'rb')
lang_obj = pickle.load(f_handle)
f_handle.close()
vocab_size = len(lang_obj.idx2ch) + 1

# define models
# Forward and backward GRU encoders over the token sequence.
enc_f = nets.GRUEncoder(vocab_size=vocab_size, batch_sz = BATCH_SZ, reverse=False,
    enc_units = 128)
enc_b = nets.GRUEncoder(vocab_size=vocab_size, batch_sz = BATCH_SZ, reverse=True,
    enc_units = 128)
# Convolutional encoder over the one-hot token matrix.
conv_encoder = nets.ConvEncoder(
    conv_units=[256, 512, 512],
    # pool_sizes=[8, 8, 8, 8],
    conv_kernel_sizes=[8, 12, 16],
    fcs=[128, 0.2, 'elu',
        512, 0.2, 'elu',
        512])
# Dense blocks: fusion head, latent mean/log-variance heads (32-d latent),
# property regression head (9 targets) and fingerprint head (167 bits).
fcuk = nets.FullyConnectedUnits([512, 'leaky_relu', 0.25, 512])
d_mean = nets.FullyConnectedUnits([32])
d_log_var = nets.FullyConnectedUnits([32])

fcuk_props = nets.FullyConnectedUnits([9])
fcuk_fp = nets.FullyConnectedUnits([167, 'sigmoid'])
decoder = nets.OneHotDecoder(vocab_size=vocab_size, dec_units = 256)
# bypass_v_f and simple_decoder are built (and checkpointed below) but are
# not used in this training loop.
bypass_v_f = nets.FullyConnectedUnits([1])
simple_decoder = nets.SimpleDecoder(vocab_size=vocab_size, dec_units=1024,
    batch_sz = BATCH_SZ)

# convert to tensor
x_tr = tf.convert_to_tensor(x_tr)
y_tr = tf.convert_to_tensor(y_tr)
fp_tr = tf.convert_to_tensor(fp_tr)

# make them into a dataset object
ds = tf.data.Dataset.from_tensor_slices((x_tr, y_tr, fp_tr))
ds = ds.apply(tf.contrib.data.batch_and_drop_remainder(BATCH_SZ))

# Drop the big array references so the garbage collector can reclaim them;
# the data now lives inside the Dataset.
x_tr = None
y_tr = None
fp_tr = None
x_te = None
y_te = None
fp_te = None



optimizer = tf.train.AdamOptimizer(1e-5)
# Number of samples over which the KL weight is annealed from 0 to 1.
anneal_step = tf.constant(100000000.0, dtype=tf.float32)


for epoch in range(10000):
    # loop through the batches
    for (batch, (xs, ys, fps)) in enumerate(ds):
        # TODO
        # one training batch
        # KL annealing: sine ramp from 0 to 1 over anneal_step samples,
        # then held constant at 1.
        n_iter = tf.constant(epoch * int(n_samples) + batch * BATCH_SZ, dtype=tf.float32)
        kl_anneal = tf.cond(n_iter < anneal_step,
            lambda: tf.math.sin(tf.div(n_iter, anneal_step) * 0.5 * tf.constant(np.pi, dtype=tf.float32)),
            lambda: tf.constant(1.0, dtype=tf.float32))

        with tf.GradientTape() as tape: # for descent
            # training
            # Encode the sequence with both GRU directions plus the conv
            # encoder, then fuse into latent mean / log-variance.
            eo_f, h_f = enc_f(xs)
            eo_b, h_b = enc_b(xs)
            x_attention = tf.concat([h_f, h_b], axis=-1)
            x_attention = fcuk(x_attention)
            x_conv = conv_encoder(tf.one_hot(xs, 33))
            x = tf.concat([x_attention, x_conv], axis=-1)
            mean = d_mean(x)
            log_var = d_log_var(x)
            # Reparameterization trick: z = mean + eps * exp(log_var / 2).
            z_noise = tf.clip_by_norm(tf.random_normal(mean.shape), 1e5) * tf.exp(log_var * .5)
            z = z_noise + mean
            # Auxiliary heads predict properties and fingerprint bits from
            # the latent mean (not the sampled z).
            ys_hat = fcuk_props(mean)
            fp_hat = fcuk_fp(mean)
            # loss0: property regression; loss1: fingerprint log-loss;
            # loss2: sequence reconstruction; loss3: annealed KL divergence
            # of the diagonal Gaussian posterior against N(0, I).
            loss0 = tf.clip_by_value(tf.losses.mean_squared_error(ys, ys_hat), 0.0, 1e5)
            loss1 = tf.clip_by_value(tf.losses.log_loss(fps, fp_hat), 0.0, 1e5)
            xs_bar = decoder(z)
            loss2 = tf.clip_by_value(
                tf.reduce_mean(
                    tf.nn.sparse_softmax_cross_entropy_with_logits(labels = xs, logits = xs_bar)), 0.0, 1e20)
            loss3 = tf.clip_by_value(kl_anneal * tf.reduce_mean(-0.5 * tf.reduce_mean(1 + log_var - tf.square(mean) - tf.exp(log_var), axis=[1])), 1.0, 1e5)

            lt = loss0 + loss1 + loss2 + loss3

        # start grad norm
        variables = conv_encoder.variables +\
            d_mean.variables + decoder.variables + d_log_var.variables +\
            enc_f.variables + enc_b.variables + fcuk.variables +\
            fcuk_props.variables + fcuk_fp.variables

        gradients = tape.gradient(lt, variables)

        optimizer.apply_gradients(zip(gradients, variables), tf.train.get_or_create_global_step())

        # Periodic progress report of the four loss components.
        if batch % 100 == 0:
            print(loss0.numpy(), loss1.numpy(), loss2.numpy(), loss3.numpy())

        # Checkpoint all sub-networks, but only when the loss is finite.
        if (batch % 1000 == 0) and ( np.isnan(lt.numpy()) == False):
            fcuk.save_weights('./fcuk.h5')
            enc_f.save_weights('./enc_f.h5')
            enc_b.save_weights('./enc_b.h5')
            fcuk_props.save_weights('./fcuk_props.h5')
            fcuk_fp.save_weights('./fcuk_fp.h5')
            d_mean.save_weights('./d_mean.h5')
            d_log_var.save_weights('./d_log_var.h5')
            decoder.save_weights('./decoder.h5')
            bypass_v_f.save_weights('./bypass_v_f.h5')
            simple_decoder.save_weights('./simple_decoder.h5')
# import datetimeNEWLINE# import websocketNEWLINE# import requestsNEWLINE# import timeNEWLINE# import jsonNEWLINE#NEWLINE# def connectToWebSocket():NEWLINE# print(datetime.datetime.now(), "| [GatewayManager]: Connecting to websocket...")NEWLINE# websocket.enableTrace(True)NEWLINE# print(datetime.datetime.now(), "| [GatewayManager]: Attempting to create connection to websocket...")NEWLINE# try:NEWLINE# ws = websocket.create_connection("ws://" + "0.0.0.0" + ":" + str(4999) + '/events')NEWLINE# print(datetime.datetime.now(), "| [GatewayManager]: Successfully connected to websocket.")NEWLINE# while True:NEWLINE# print('waiting')NEWLINE# ws.send('hello')NEWLINE# time.sleep(5)NEWLINE# except ConnectionError as e:NEWLINE# print(e)NEWLINE# print(datetime.datetime.now(), "| [GatewayManager]: ConnectionError occured. Attempting to reconnect...")NEWLINE# #ws = connectToWebSocket()NEWLINE# requests.put('http://127.0.0.1:4999/broadcast', data=json.dumps({'hello':1}), headers= {'Content-Type': 'application/json'})NEWLINE#NEWLINE#NEWLINE
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Tests for methods in gadget registry."""

import os

from core.domain import gadget_registry
from core.tests import test_utils
import feconf


class GadgetRegistryUnitTests(test_utils.GenericTestBase):
    """Test for the gadget registry."""

    def test_allowed_gadgets_and_counts(self):
        """Do sanity checks on the ALLOWED_GADGETS dict in feconf.py."""
        self.assertEqual(
            len(gadget_registry.Registry.get_all_gadgets()),
            len(feconf.ALLOWED_GADGETS))

        # items() instead of the Python-2-only iteritems() keeps this test
        # runnable under both Python 2 and Python 3.
        for (gadget_name, gadget_definition) in (
                feconf.ALLOWED_GADGETS.items()):
            # Every registered gadget must ship a matching <name>.py module
            # in its declared directory.
            contents = os.listdir(
                os.path.join(os.getcwd(), gadget_definition['dir']))
            self.assertIn('%s.py' % gadget_name, contents)

    def test_get_all_specs(self):
        """Test the get_all_specs() method."""
        specs_dict = gadget_registry.Registry.get_all_specs()
        # One spec per allowed gadget; len(dict) avoids materialising keys().
        self.assertEqual(
            len(specs_dict), len(feconf.ALLOWED_GADGETS))
r"""NEWLINECommon DigraphsNEWLINENEWLINEAll digraphs in Sage can be built through the ``digraphs`` object. In order toNEWLINEbuild a circuit on 15 elements, one can do::NEWLINENEWLINE sage: g = digraphs.Circuit(15)NEWLINENEWLINETo get a circulant graph on 10 vertices in which a vertex `i` has `i+2` andNEWLINE`i+3` as outneighbors::NEWLINENEWLINE sage: p = digraphs.Circulant(10,[2,3])NEWLINENEWLINEMore interestingly, one can get the list of all digraphs that Sage knows how toNEWLINEbuild by typing ``digraphs.`` in Sage and then hitting tab.NEWLINENEWLINE.. csv-table::NEWLINE :class: contentstableNEWLINE :widths: 30, 70NEWLINE :delim: |NEWLINENEWLINE :meth:`~DiGraphGenerators.ButterflyGraph` | Returns a n-dimensional butterfly graph.NEWLINE :meth:`~DiGraphGenerators.Circuit` | Returns the circuit on `n` vertices.NEWLINE :meth:`~DiGraphGenerators.Circulant` | Returns a circulant digraph on `n` vertices from a set of integers.NEWLINE :meth:`~DiGraphGenerators.Complete` | Return a complete digraph on `n` vertices.NEWLINE :meth:`~DiGraphGenerators.DeBruijn` | Returns the De Bruijn digraph with parameters `k,n`.NEWLINE :meth:`~DiGraphGenerators.GeneralizedDeBruijn` | Returns the generalized de Bruijn digraph of order `n` and degree `d`.NEWLINE :meth:`~DiGraphGenerators.ImaseItoh` | Returns the digraph of Imase and Itoh of order `n` and degree `d`.NEWLINE :meth:`~DiGraphGenerators.Kautz` | Returns the Kautz digraph of degree `d` and diameter `D`.NEWLINE :meth:`~DiGraphGenerators.Paley` | Return a Paley digraph on `q` vertices.NEWLINE :meth:`~DiGraphGenerators.Path` | Returns a directed path on `n` vertices.NEWLINE :meth:`~DiGraphGenerators.RandomDirectedGNC` | Returns a random GNC (growing network with copying) digraph with `n` vertices.NEWLINE :meth:`~DiGraphGenerators.RandomDirectedGNM` | Returns a random labelled digraph on `n` nodes and `m` arcs.NEWLINE :meth:`~DiGraphGenerators.RandomDirectedGNP` | Returns a random digraph on `n` nodes.NEWLINE 
:meth:`~DiGraphGenerators.RandomDirectedGN` | Returns a random GN (growing network) digraph with `n` vertices.NEWLINE :meth:`~DiGraphGenerators.RandomDirectedGNR` | Returns a random GNR (growing network with redirection) digraph.NEWLINE :meth:`~DiGraphGenerators.RandomSemiComplete` | Return a random semi-complete digraph of order `n`.NEWLINE :meth:`~DiGraphGenerators.RandomTournament` | Returns a random tournament on `n` vertices.NEWLINE :meth:`~DiGraphGenerators.TransitiveTournament`| Returns a transitive tournament on `n` vertices.NEWLINE :meth:`~DiGraphGenerators.tournaments_nauty` | Returns all tournaments on `n` vertices using Nauty.NEWLINENEWLINENEWLINEAUTHORS:NEWLINENEWLINE- Robert L. Miller (2006)NEWLINE- Emily A. Kirkman (2006)NEWLINE- Michael C. Yurko (2009)NEWLINE- David Coudert (2012)NEWLINENEWLINEFunctions and methodsNEWLINE---------------------NEWLINENEWLINE"""NEWLINENEWLINE################################################################################NEWLINE# Copyright (C) 2006 Robert L. Miller <rlmillster@gmail.com>NEWLINE# and Emily A. KirkmanNEWLINE# Copyright (C) 2009 Michael C. Yurko <myurko@gmail.com>NEWLINE#NEWLINE# Distributed under the terms of the GNU General Public License (GPL)NEWLINE# http://www.gnu.org/licenses/NEWLINE################################################################################NEWLINEfrom __future__ import print_function, divisionNEWLINEfrom six.moves import rangeNEWLINENEWLINEfrom sage.misc.randstate import current_randstateNEWLINEfrom sage.graphs.digraph import DiGraphNEWLINENEWLINENEWLINEclass DiGraphGenerators():NEWLINE r"""NEWLINE A class consisting of constructors for several common digraphs,NEWLINE including orderly generation of isomorphism class representatives.NEWLINENEWLINE A list of all graphs and graph structures in this database isNEWLINE available via tab completion. Type "digraphs." 
and then hit tab toNEWLINE see which graphs are available.NEWLINENEWLINE The docstrings include educational information about each namedNEWLINE digraph with the hopes that this class can be used as a reference.NEWLINENEWLINE The constructors currently in this class include::NEWLINENEWLINE Random Directed Graphs:NEWLINE - RandomDirectedGNNEWLINE - RandomDirectedGNCNEWLINE - RandomDirectedGNPNEWLINE - RandomDirectedGNMNEWLINE - RandomDirectedGNRNEWLINE - RandomTournamentNEWLINE - RandomSemiCompleteNEWLINENEWLINE Families of Graphs:NEWLINE - CompleteNEWLINE - DeBruijnNEWLINE - GeneralizedDeBruijnNEWLINE - KautzNEWLINE - PathNEWLINE - ImaseItohNEWLINE - RandomTournamentNEWLINE - TransitiveTournamentNEWLINE - tournaments_nautyNEWLINENEWLINENEWLINENEWLINE ORDERLY GENERATION: digraphs(vertices, property=lambda x: True,NEWLINE augment='edges', size=None)NEWLINENEWLINE Accesses the generator of isomorphism class representatives.NEWLINE Iterates over distinct, exhaustive representatives.NEWLINENEWLINE INPUT:NEWLINENEWLINENEWLINE - ``vertices`` - natural number or ``None`` to infinitely generateNEWLINE bigger and bigger digraphs.NEWLINENEWLINE - ``property`` - any property to be tested on digraphsNEWLINE before generation.NEWLINENEWLINE - ``augment`` - choices:NEWLINENEWLINE - ``'vertices'`` - augments by adding a vertex, andNEWLINE edges incident to that vertex. In this case, all digraphs on *up to*NEWLINE n=vertices are generated. If for any digraph G satisfying theNEWLINE property, every subgraph, obtained from G by deleting one vertexNEWLINE and only edges incident to that vertex, satisfies the property,NEWLINE then this will generate all digraphs with that property. If thisNEWLINE does not hold, then all the digraphs generated will satisfy theNEWLINE property, but there will be some missing.NEWLINENEWLINE - ``'edges'`` - augments a fixed number of vertices byNEWLINE adding one edge In this case, all digraphs on *exactly* n=verticesNEWLINE are generated. 
If for any graph G satisfying the property, everyNEWLINE subgraph, obtained from G by deleting one edge but not the verticesNEWLINE incident to that edge, satisfies the property, then this willNEWLINE generate all digraphs with that property. If this does not hold,NEWLINE then all the digraphs generated will satisfy the property, butNEWLINE there will be some missing.NEWLINENEWLINE - ``implementation`` - which underlying implementation to use (see DiGraph?)NEWLINENEWLINE - ``sparse`` - ignored if implementation is not ``c_graph``NEWLINENEWLINE EXAMPLES: Print digraphs on 2 or less vertices.NEWLINENEWLINE ::NEWLINENEWLINE sage: for D in digraphs(2, augment='vertices'):NEWLINE ....: print(D)NEWLINE Digraph on 0 verticesNEWLINE Digraph on 1 vertexNEWLINE Digraph on 2 verticesNEWLINE Digraph on 2 verticesNEWLINE Digraph on 2 verticesNEWLINENEWLINE Note that we can also get digraphs with underlying Cython implementation::NEWLINENEWLINE sage: for D in digraphs(2, augment='vertices', implementation='c_graph'):NEWLINE ....: print(D)NEWLINE Digraph on 0 verticesNEWLINE Digraph on 1 vertexNEWLINE Digraph on 2 verticesNEWLINE Digraph on 2 verticesNEWLINE Digraph on 2 verticesNEWLINENEWLINE Print digraphs on 3 vertices.NEWLINENEWLINE ::NEWLINENEWLINE sage: for D in digraphs(3):NEWLINE ....: print(D)NEWLINE Digraph on 3 verticesNEWLINE Digraph on 3 verticesNEWLINE ...NEWLINE Digraph on 3 verticesNEWLINE Digraph on 3 verticesNEWLINENEWLINE Generate all digraphs with 4 vertices and 3 edges.NEWLINENEWLINE ::NEWLINENEWLINE sage: L = digraphs(4, size=3)NEWLINE sage: len(list(L))NEWLINE 13NEWLINENEWLINE Generate all digraphs with 4 vertices and up to 3 edges.NEWLINENEWLINE ::NEWLINENEWLINE sage: L = list(digraphs(4, lambda G: G.size() <= 3))NEWLINE sage: len(L)NEWLINE 20NEWLINE sage: graphs_list.show_graphs(L) # long timeNEWLINENEWLINE Generate all digraphs with degree at most 2, up to 5 vertices.NEWLINENEWLINE ::NEWLINENEWLINE sage: property = lambda G: ( max([G.degree(v) for v in 
G] + [0]) <= 2 )NEWLINE sage: L = list(digraphs(5, property, augment='vertices'))NEWLINE sage: len(L)NEWLINE 75NEWLINENEWLINE Generate digraphs on the fly: (see http://oeis.org/classic/A000273)NEWLINENEWLINE ::NEWLINENEWLINE sage: for i in range(5):NEWLINE ....: print(len(list(digraphs(i))))NEWLINE 1NEWLINE 1NEWLINE 3NEWLINE 16NEWLINE 218NEWLINENEWLINE REFERENCE:NEWLINENEWLINE - Brendan D. McKay, Isomorph-Free Exhaustive generation. JournalNEWLINE of Algorithms Volume 26, Issue 2, February 1998, pages 306-324.NEWLINE """NEWLINENEWLINE def ButterflyGraph(self, n, vertices='strings'):NEWLINE """NEWLINE Returns a n-dimensional butterfly graph. The vertices consist ofNEWLINE pairs (v,i), where v is an n-dimensional tuple (vector) with binaryNEWLINE entries (or a string representation of such) and i is an integer inNEWLINE [0..n]. A directed edge goes from (v,i) to (w,i+1) if v and w areNEWLINE identical except for possibly v[i] != w[i].NEWLINENEWLINE A butterfly graph has `(2^n)(n+1)` vertices andNEWLINE `n2^{n+1}` edges.NEWLINENEWLINE INPUT:NEWLINENEWLINENEWLINE - ``vertices`` - 'strings' (default) or 'vectors',NEWLINE specifying whether the vertices are zero-one strings or actuallyNEWLINE tuples over GF(2).NEWLINENEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: digraphs.ButterflyGraph(2).edges(labels=False)NEWLINE [(('00', 0), ('00', 1)),NEWLINE (('00', 0), ('10', 1)),NEWLINE (('00', 1), ('00', 2)),NEWLINE (('00', 1), ('01', 2)),NEWLINE (('01', 0), ('01', 1)),NEWLINE (('01', 0), ('11', 1)),NEWLINE (('01', 1), ('00', 2)),NEWLINE (('01', 1), ('01', 2)),NEWLINE (('10', 0), ('00', 1)),NEWLINE (('10', 0), ('10', 1)),NEWLINE (('10', 1), ('10', 2)),NEWLINE (('10', 1), ('11', 2)),NEWLINE (('11', 0), ('01', 1)),NEWLINE (('11', 0), ('11', 1)),NEWLINE (('11', 1), ('10', 2)),NEWLINE (('11', 1), ('11', 2))]NEWLINE sage: digraphs.ButterflyGraph(2,vertices='vectors').edges(labels=False)NEWLINE [(((0, 0), 0), ((0, 0), 1)),NEWLINE (((0, 0), 0), ((1, 0), 1)),NEWLINE (((0, 0), 1), ((0, 
0), 2)),NEWLINE (((0, 0), 1), ((0, 1), 2)),NEWLINE (((0, 1), 0), ((0, 1), 1)),NEWLINE (((0, 1), 0), ((1, 1), 1)),NEWLINE (((0, 1), 1), ((0, 0), 2)),NEWLINE (((0, 1), 1), ((0, 1), 2)),NEWLINE (((1, 0), 0), ((0, 0), 1)),NEWLINE (((1, 0), 0), ((1, 0), 1)),NEWLINE (((1, 0), 1), ((1, 0), 2)),NEWLINE (((1, 0), 1), ((1, 1), 2)),NEWLINE (((1, 1), 0), ((0, 1), 1)),NEWLINE (((1, 1), 0), ((1, 1), 1)),NEWLINE (((1, 1), 1), ((1, 0), 2)),NEWLINE (((1, 1), 1), ((1, 1), 2))]NEWLINE """NEWLINE # We could switch to Sage integers to handle arbitrary n.NEWLINE if vertices=='strings':NEWLINE if n>=31:NEWLINE raise NotImplementedError("vertices='strings' is only valid for n<=30.")NEWLINE from sage.graphs.generic_graph_pyx import int_to_binary_stringNEWLINE butterfly = {}NEWLINE for v in range(2 ** n):NEWLINE for i in range(n):NEWLINE w = vNEWLINE w ^= (1 << i) # push 1 to the left by i and xor with wNEWLINE bv = int_to_binary_string(v)NEWLINE bw = int_to_binary_string(w)NEWLINE # pad and reverse the stringsNEWLINE padded_bv = ('0'*(n-len(bv))+bv)[::-1]NEWLINE padded_bw = ('0'*(n-len(bw))+bw)[::-1]NEWLINE butterfly[(padded_bv,i)]=[(padded_bv,i+1), (padded_bw,i+1)]NEWLINE elif vertices=='vectors':NEWLINE from sage.modules.free_module import VectorSpaceNEWLINE from sage.rings.finite_rings.finite_field_constructor import FiniteFieldNEWLINE from copy import copyNEWLINE butterfly = {}NEWLINE for v in VectorSpace(FiniteField(2),n):NEWLINE for i in range(n):NEWLINE w=copy(v)NEWLINE w[i] += 1 # Flip the ith bitNEWLINE # We must call tuple since vectors are mutable. 
To obtainNEWLINE # a vector from the tuple t, just call vector(t).NEWLINE butterfly[(tuple(v),i)]=[(tuple(v),i+1), (tuple(w),i+1)]NEWLINE else:NEWLINE raise NotImplementedError("vertices must be 'strings' or 'vectors'.")NEWLINE return DiGraph(butterfly)NEWLINENEWLINE def Path(self,n):NEWLINE r"""NEWLINE Returns a directed path on `n` vertices.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` (integer) -- number of vertices in the path.NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: g = digraphs.Path(5)NEWLINE sage: g.vertices()NEWLINE [0, 1, 2, 3, 4]NEWLINE sage: g.size()NEWLINE 4NEWLINE sage: g.automorphism_group().cardinality()NEWLINE 1NEWLINE """NEWLINE g = DiGraph(n)NEWLINE g.name("Path")NEWLINENEWLINE if n:NEWLINE g.add_path(list(range(n)))NEWLINENEWLINE g.set_pos({i:(i,0) for i in range(n)})NEWLINE return gNEWLINENEWLINE def Paley(self, q):NEWLINE r"""NEWLINE Return a Paley digraph on `q` vertices.NEWLINENEWLINE Parameter `q` must be the power of a prime number and congruent to 3 modNEWLINE 4.NEWLINENEWLINE .. 
SEEALSO::NEWLINENEWLINE - :wikipedia:`Paley_graph`NEWLINE - :meth:`~sage.graphs.graph_generators.GraphGenerators.PaleyGraph`NEWLINENEWLINE EXAMPLES:NEWLINENEWLINE A Paley digraph has `n * (n-1) / 2` edges, its underlying graph is aNEWLINE clique, and so it is a tournament::NEWLINENEWLINE sage: g = digraphs.Paley(7); gNEWLINE Paley digraph with parameter 7: Digraph on 7 verticesNEWLINE sage: g.size() == g.order() * (g.order() - 1) / 2NEWLINE TrueNEWLINE sage: g.to_undirected().is_clique()NEWLINE TrueNEWLINENEWLINE A Paley digraph is always self-complementary::NEWLINENEWLINE sage: g.complement().is_isomorphic(g)NEWLINE TrueNEWLINENEWLINE TESTS:NEWLINENEWLINE Wrong parameter::NEWLINENEWLINE sage: digraphs.Paley(6)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: parameter q must be a prime powerNEWLINE sage: digraphs.Paley(5)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: parameter q must be congruent to 3 mod 4NEWLINE """NEWLINE from sage.rings.finite_rings.integer_mod import modNEWLINE from sage.rings.finite_rings.finite_field_constructor import FiniteFieldNEWLINE from sage.arith.all import is_prime_powerNEWLINE if not is_prime_power(q):NEWLINE raise ValueError("parameter q must be a prime power")NEWLINE if not mod(q, 4) == 3:NEWLINE raise ValueError("parameter q must be congruent to 3 mod 4")NEWLINE g = DiGraph([FiniteField(q,'a'), lambda i,j: (i!=j) and (j-i).is_square()],NEWLINE loops=False, name="Paley digraph with parameter {}".format(q))NEWLINE return gNEWLINENEWLINE def TransitiveTournament(self, n):NEWLINE r"""NEWLINE Returns a transitive tournament on `n` vertices.NEWLINENEWLINE In this tournament there is an edge from `i` to `j` if `i<j`.NEWLINENEWLINE See the :wikipedia:`Tournament_(graph_theory)` for more information.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` (integer) -- number of vertices in the tournament.NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: g = digraphs.TransitiveTournament(5)NEWLINE sage: 
g.vertices()NEWLINE [0, 1, 2, 3, 4]NEWLINE sage: g.size()NEWLINE 10NEWLINE sage: g.automorphism_group().cardinality()NEWLINE 1NEWLINENEWLINE .. SEEALSO::NEWLINENEWLINE - :wikipedia:`Tournament_(graph_theory)`NEWLINE - :meth:`~sage.graphs.digraph.DiGraph.is_tournament`NEWLINE - :meth:`~sage.graphs.digraph.DiGraph.is_transitive`NEWLINE - :meth:`~sage.graphs.digraph_generators.DiGraphGenerators.RandomTournament`NEWLINENEWLINE TESTS::NEWLINENEWLINE sage: digraphs.TransitiveTournament(-1)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The number of vertices cannot be strictly negative!NEWLINE """NEWLINE g = DiGraph(n)NEWLINE g.name("Transitive Tournament")NEWLINENEWLINE for i in range(n-1):NEWLINE for j in range(i+1, n):NEWLINE g.add_edge(i, j)NEWLINENEWLINE g._circle_embedding(list(range(n)))NEWLINENEWLINE return gNEWLINENEWLINE def RandomTournament(self, n):NEWLINE r"""NEWLINE Returns a random tournament on `n` vertices.NEWLINENEWLINE For every pair of vertices, the tournament has an edge fromNEWLINE `i` to `j` with probability `1/2`, otherwise it has an edgeNEWLINE from `j` to `i`.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` (integer) -- number of vertices.NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: T = digraphs.RandomTournament(10); TNEWLINE Random Tournament: Digraph on 10 verticesNEWLINE sage: T.size() == binomial(10, 2)NEWLINE TrueNEWLINE sage: T.is_tournament()NEWLINE TrueNEWLINE sage: digraphs.RandomTournament(-1)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The number of vertices cannot be strictly negative!NEWLINENEWLINE .. 
SEEALSO::NEWLINENEWLINE - :wikipedia:`Tournament_(graph_theory)`NEWLINE - :meth:`~sage.graphs.digraph.DiGraph.is_tournament`NEWLINE - :meth:`~sage.graphs.digraph_generators.DiGraphGenerators.TransitiveTournament`NEWLINE - :meth:`~sage.graphs.digraph_generators.DiGraphGenerators.Complete`NEWLINE - :meth:`~sage.graphs.digraph_generators.DiGraphGenerators.RandomSemiComplete`NEWLINE """NEWLINE from sage.misc.prandom import randomNEWLINE g = DiGraph(n)NEWLINE g.name("Random Tournament")NEWLINENEWLINE for i in range(n-1):NEWLINE for j in range(i+1, n):NEWLINE if random() <= .5:NEWLINE g.add_edge(i, j)NEWLINE else:NEWLINE g.add_edge(j, i)NEWLINENEWLINE g._circle_embedding(list(range(n)))NEWLINENEWLINE return gNEWLINENEWLINE def tournaments_nauty(self, n,NEWLINE min_out_degree = None, max_out_degree = None,NEWLINE strongly_connected = False, debug=False, options=""):NEWLINE r"""NEWLINE Returns all tournaments on `n` vertices using Nauty.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` (integer) -- number of vertices.NEWLINENEWLINE - ``min_out_degree``, ``max_out_degree`` (integers) -- if set toNEWLINE ``None`` (default), then the min/max out-degree is not constrained.NEWLINENEWLINE - ``debug`` (boolean) -- if ``True`` the first line of genbg's output toNEWLINE standard error is captured and the first call to the generator'sNEWLINE ``next()`` function will return this line as a string. A line leadingNEWLINE with ">A" indicates a successful initiation of the program with someNEWLINE information on the arguments, while a line beginning with ">E"NEWLINE indicates an error with the input.NEWLINENEWLINE - ``options`` (string) -- anything else that should be forwarded asNEWLINE input to Nauty's genbg. See its documentation for more information :NEWLINE `<http://cs.anu.edu.au/~bdm/nauty/>`_.NEWLINENEWLINENEWLINE .. 
NOTE::NEWLINENEWLINE To use this method you must first install the Nauty spkg.NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: for g in digraphs.tournaments_nauty(4):NEWLINE ....: print(g.edges(labels = False))NEWLINE [(1, 0), (2, 0), (2, 1), (3, 0), (3, 1), (3, 2)]NEWLINE [(1, 0), (1, 3), (2, 0), (2, 1), (3, 0), (3, 2)]NEWLINE [(0, 2), (1, 0), (2, 1), (3, 0), (3, 1), (3, 2)]NEWLINE [(0, 2), (0, 3), (1, 0), (2, 1), (3, 1), (3, 2)]NEWLINE sage: tournaments = digraphs.tournaments_nautyNEWLINE sage: [len(list(tournaments(x))) for x in range(1,8)]NEWLINE [1, 1, 2, 4, 12, 56, 456]NEWLINE sage: [len(list(tournaments(x, strongly_connected = True))) for x in range(1,9)]NEWLINE [1, 0, 1, 1, 6, 35, 353, 6008]NEWLINE """NEWLINE import subprocessNEWLINENEWLINE nauty_input = optionsNEWLINENEWLINE if min_out_degree is None:NEWLINE min_out_degree = 0NEWLINE if max_out_degree is None:NEWLINE max_out_degree = n-1NEWLINENEWLINE nauty_input += " -d"+str(min_out_degree)NEWLINE nauty_input += " -D"+str(max_out_degree)NEWLINENEWLINE if strongly_connected:NEWLINE nauty_input += " -c"NEWLINENEWLINE nauty_input += " "+str(n) +" "NEWLINENEWLINE sp = subprocess.Popen("gentourng {0}".format(nauty_input), shell=True,NEWLINE stdin=subprocess.PIPE, stdout=subprocess.PIPE,NEWLINE stderr=subprocess.PIPE, close_fds=True)NEWLINENEWLINE if debug:NEWLINE yield sp.stderr.readline()NEWLINENEWLINE gen = sp.stdoutNEWLINE while True:NEWLINE try:NEWLINE s = next(gen)NEWLINE except StopIteration:NEWLINE # Exhausted list of graphs from nauty gengNEWLINE returnNEWLINENEWLINE G = DiGraph(n)NEWLINE i = 0NEWLINE j = 1NEWLINE for b in s[:-1]:NEWLINE if b == '0':NEWLINE G.add_edge(i,j)NEWLINE else:NEWLINE G.add_edge(j,i)NEWLINENEWLINE if j == n-1:NEWLINE i += 1NEWLINE j = i+1NEWLINE else:NEWLINE j += 1NEWLINENEWLINE yield GNEWLINENEWLINENEWLINE def Complete(self, n, loops=False):NEWLINE r"""NEWLINE Return the complete digraph on `n` vertices.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` (integer) -- number of 
vertices.NEWLINENEWLINE - ``loops`` (boolean) -- whether to add loops or not, i.e., edges fromNEWLINE `u` to itself.NEWLINENEWLINE .. SEEALSO::NEWLINENEWLINE - :meth:`~sage.graphs.digraph_generators.DiGraphGenerators.RandomSemiComplete`NEWLINENEWLINE - :meth:`~sage.graphs.digraph_generators.DiGraphGenerators.RandomTournament`NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: n = 10NEWLINE sage: G = digraphs.Complete(n); GNEWLINE Complete digraph: Digraph on 10 verticesNEWLINE sage: G.size() == n*(n-1)NEWLINE TrueNEWLINE sage: G = digraphs.Complete(n, loops=True); GNEWLINE Complete digraph with loops: Looped digraph on 10 verticesNEWLINE sage: G.size() == n*nNEWLINE TrueNEWLINE sage: digraphs.Complete(-1)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The number of vertices cannot be strictly negative!NEWLINE """NEWLINE G = DiGraph(n, name="Complete digraph"+(" with loops" if loops else ''), loops=loops)NEWLINENEWLINE if loops:NEWLINE G.add_edges((u,u) for u in range(n))NEWLINENEWLINE G.add_edges((u,v) for u in range(n) for v in range(n) if u!=v)NEWLINENEWLINE G._circle_embedding(list(range(n)))NEWLINENEWLINE return GNEWLINENEWLINE def Circuit(self,n):NEWLINE r"""NEWLINE Returns the circuit on `n` verticesNEWLINENEWLINE The circuit is an oriented ``CycleGraph``NEWLINENEWLINE EXAMPLES:NEWLINENEWLINE A circuit is the smallest strongly connected digraph::NEWLINENEWLINE sage: circuit = digraphs.Circuit(15)NEWLINE sage: len(circuit.strongly_connected_components()) == 1NEWLINE TrueNEWLINE """NEWLINE g = DiGraph(n)NEWLINE g.name("Circuit")NEWLINENEWLINE if n==0:NEWLINE return gNEWLINE elif n == 1:NEWLINE g.allow_loops(True)NEWLINE g.add_edge(0,0)NEWLINE return gNEWLINE else:NEWLINE g.add_edges([(i,i+1) for i in range(n-1)])NEWLINE g.add_edge(n-1,0)NEWLINE return gNEWLINENEWLINE def Circulant(self,n,integers):NEWLINE r"""NEWLINE Returns a circulant digraph on `n` vertices from a set of integers.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` (integer) -- 
number of vertices.NEWLINENEWLINE - ``integers`` -- the list of integers such that there is an edge fromNEWLINE `i` to `j` if and only if ``(j-i)%n in integers``.NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: digraphs.Circulant(13,[3,5,7])NEWLINE Circulant graph ([3, 5, 7]): Digraph on 13 verticesNEWLINENEWLINE TESTS::NEWLINENEWLINE sage: digraphs.Circulant(13,[3,5,7,"hey"])NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The list must contain only relative integers.NEWLINE sage: digraphs.Circulant(3,[3,5,7,3.4])NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The list must contain only relative integers.NEWLINE """NEWLINE from sage.rings.integer_ring import ZZNEWLINENEWLINE # Bad input and loopsNEWLINE loops = FalseNEWLINE for i in integers:NEWLINE if not i in ZZ:NEWLINE raise ValueError("The list must contain only relative integers.")NEWLINE if (i%n) == 0:NEWLINE loops = TrueNEWLINENEWLINE G = DiGraph(n, name="Circulant graph ("+str(integers)+")", loops=loops)NEWLINENEWLINE G._circle_embedding(list(range(n)))NEWLINE for v in range(n):NEWLINE G.add_edges([(v,(v+j)%n) for j in integers])NEWLINENEWLINE return GNEWLINENEWLINE def DeBruijn(self, k, n, vertices = 'strings'):NEWLINE r"""NEWLINE Returns the De Bruijn digraph with parameters `k,n`.NEWLINENEWLINE The De Bruijn digraph with parameters `k,n` is built upon a set ofNEWLINE vertices equal to the set of words of length `n` from a dictionary ofNEWLINE `k` letters.NEWLINENEWLINE In this digraph, there is an arc `w_1w_2` if `w_2` can be obtained fromNEWLINE `w_1` by removing the leftmost letter and adding a new letter at itsNEWLINE right end. For more information, see the :wikipedia:`De_Bruijn_graph`.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``k`` -- Two possibilities for this parameter :NEWLINE - An integer equal to the cardinality of the alphabet to use, thatNEWLINE is the degree of the digraph to be produced.NEWLINE - An iterable object to be used as the set of letters. 
The degreeNEWLINE of the resulting digraph is the cardinality of the set ofNEWLINE letters.NEWLINENEWLINE - ``n`` -- An integer equal to the length of words in the De BruijnNEWLINE digraph when ``vertices == 'strings'``, and also to the diameter ofNEWLINE the digraph.NEWLINENEWLINE - ``vertices`` -- 'strings' (default) or 'integers', specifying whetherNEWLINE the vertices are words build upon an alphabet or integers.NEWLINENEWLINE EXAMPLES:NEWLINENEWLINE de Bruijn digraph of degree 2 and diameter 2::NEWLINENEWLINE sage: db = digraphs.DeBruijn(2, 2); dbNEWLINE De Bruijn digraph (k=2, n=2): Looped digraph on 4 verticesNEWLINE sage: db.order(), db.size()NEWLINE (4, 8)NEWLINE sage: db.diameter()NEWLINE 2NEWLINENEWLINE Building a de Bruijn digraph on a different alphabet::NEWLINENEWLINE sage: g = digraphs.DeBruijn(['a', 'b'], 2)NEWLINE sage: g.vertices()NEWLINE ['aa', 'ab', 'ba', 'bb']NEWLINE sage: g.is_isomorphic(db)NEWLINE TrueNEWLINE sage: g = digraphs.DeBruijn(['AA', 'BB'], 2)NEWLINE sage: g.vertices()NEWLINE ['AA,AA', 'AA,BB', 'BB,AA', 'BB,BB']NEWLINE sage: g.is_isomorphic(db)NEWLINE TrueNEWLINENEWLINE TESTS:NEWLINENEWLINE Alphabet of null size or words of length zero::NEWLINENEWLINE sage: digraphs.DeBruijn(5, 0)NEWLINE De Bruijn digraph (k=5, n=0): Looped multi-digraph on 1 vertexNEWLINE sage: digraphs.DeBruijn(0, 0)NEWLINE De Bruijn digraph (k=0, n=0): Looped multi-digraph on 0 verticesNEWLINENEWLINE :trac:`22355`::NEWLINENEWLINE sage: db = digraphs.DeBruijn(2, 2, vertices='strings')NEWLINE sage: db.vertices()NEWLINE ['00', '01', '10', '11']NEWLINE sage: h = digraphs.DeBruijn(2, 2, vertices='integers')NEWLINE sage: h.vertices()NEWLINE [0, 1, 2, 3]NEWLINE sage: db.is_isomorphic(h)NEWLINE TrueNEWLINE sage: digraphs.DeBruijn(0, 0, vertices='integers')NEWLINE De Bruijn digraph (k=0, n=0): Looped multi-digraph on 0 verticesNEWLINE sage: digraphs.DeBruijn(2, 2, vertices='circles')NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: unknown type for 
verticesNEWLINE """NEWLINE from sage.combinat.words.words import WordsNEWLINE from sage.rings.integer import IntegerNEWLINENEWLINE if vertices == 'strings':NEWLINE W = Words(list(range(k)) if isinstance(k, Integer) else k, n)NEWLINE A = Words(list(range(k)) if isinstance(k, Integer) else k, 1)NEWLINE g = DiGraph(loops=True)NEWLINENEWLINE if n == 0 :NEWLINE g.allow_multiple_edges(True)NEWLINE v = W[0]NEWLINE for a in A:NEWLINE g.add_edge(v.string_rep(), v.string_rep(), a.string_rep())NEWLINE else:NEWLINE for w in W:NEWLINE ww = w[1:]NEWLINE for a in A:NEWLINE g.add_edge(w.string_rep(), (ww*a).string_rep(), a.string_rep())NEWLINENEWLINE elif vertices == 'integers':NEWLINE d = k if isinstance(k, Integer) else len(list(k))NEWLINE if d == 0:NEWLINE g = DiGraph(loops=True, multiedges=True)NEWLINE else:NEWLINE g = digraphs.GeneralizedDeBruijn(d**n, d)NEWLINENEWLINE else:NEWLINE raise ValueError('unknown type for vertices')NEWLINENEWLINE g.name( "De Bruijn digraph (k=%s, n=%s)"%(k,n) )NEWLINE return gNEWLINENEWLINE def GeneralizedDeBruijn(self, n, d):NEWLINE r"""NEWLINE Returns the generalized de Bruijn digraph of order `n` and degree `d`.NEWLINENEWLINE The generalized de Bruijn digraph has been defined in [RPK80]_NEWLINE [RPK83]_. It has vertex set `V=\{0, 1,..., n-1\}` and there is an arcNEWLINE from vertex `u \in V` to all vertices `v \in V` such thatNEWLINE `v \equiv (u*d + a) \mod{n}` with `0 \leq a < d`.NEWLINENEWLINE When `n = d^{D}`, the generalized de Bruijn digraph is isomorphic to theNEWLINE de Bruijn digraph of degree `d` and diameter `D`.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` -- is the number of vertices of the digraphNEWLINENEWLINE - ``d`` -- is the degree of the digraphNEWLINENEWLINE .. 
SEEALSO::NEWLINENEWLINE * :meth:`sage.graphs.generic_graph.GenericGraph.is_circulant` --NEWLINE checks whether a (di)graph is circulant, and/or returns allNEWLINE possible sets of parameters.NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: GB = digraphs.GeneralizedDeBruijn(8, 2)NEWLINE sage: GB.is_isomorphic(digraphs.DeBruijn(2, 3), certificate = True)NEWLINE (True, {0: '000', 1: '001', 2: '010', 3: '011', 4: '100', 5: '101', 6: '110', 7: '111'})NEWLINENEWLINE TESTS:NEWLINENEWLINE An exception is raised when the degree is less than one::NEWLINENEWLINE sage: G = digraphs.GeneralizedDeBruijn(2, 0)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The generalized de Bruijn digraph is defined for degree at least one.NEWLINENEWLINE An exception is raised when the order of the graph is less than one::NEWLINENEWLINE sage: G = digraphs.GeneralizedDeBruijn(0, 2)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The generalized de Bruijn digraph is defined for at least one vertex.NEWLINENEWLINENEWLINE REFERENCES:NEWLINENEWLINE .. [RPK80] \S. M. Reddy, D. K. Pradhan, and J. Kuhl. Directed graphs withNEWLINE minimal diameter and maximal connectivity, School Eng., Oakland Univ.,NEWLINE Rochester MI, Tech. Rep., July 1980.NEWLINENEWLINE .. [RPK83] \S. Reddy, P. Raghavan, and J. Kuhl. A Class of Graphs forNEWLINE Processor Interconnection. 
*IEEE International Conference on ParallelNEWLINE Processing*, pages 154-157, Los Alamitos, Ca., USA, August 1983.NEWLINE """NEWLINE if n < 1:NEWLINE raise ValueError("The generalized de Bruijn digraph is defined for at least one vertex.")NEWLINE if d < 1:NEWLINE raise ValueError("The generalized de Bruijn digraph is defined for degree at least one.")NEWLINENEWLINE GB = DiGraph(loops = True)NEWLINE GB.allow_multiple_edges(True)NEWLINE for u in range(n):NEWLINE for a in range(u*d, u*d+d):NEWLINE GB.add_edge(u, a%n)NEWLINENEWLINE GB.name( "Generalized de Bruijn digraph (n=%s, d=%s)"%(n,d) )NEWLINE return GBNEWLINENEWLINENEWLINE def ImaseItoh(self, n, d):NEWLINE r"""NEWLINE Returns the digraph of Imase and Itoh of order `n` and degree `d`.NEWLINENEWLINE The digraph of Imase and Itoh has been defined in [II83]_. It has vertexNEWLINE set `V=\{0, 1,..., n-1\}` and there is an arc from vertex `u \in V` toNEWLINE all vertices `v \in V` such that `v \equiv (-u*d-a-1) \mod{n}` withNEWLINE `0 \leq a < d`.NEWLINENEWLINE When `n = d^{D}`, the digraph of Imase and Itoh is isomorphic to the deNEWLINE Bruijn digraph of degree `d` and diameter `D`. 
When `n = d^{D-1}(d+1)`,NEWLINE the digraph of Imase and Itoh is isomorphic to the Kautz digraphNEWLINE [Kautz68]_ of degree `d` and diameter `D`.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` -- is the number of vertices of the digraphNEWLINENEWLINE - ``d`` -- is the degree of the digraphNEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: II = digraphs.ImaseItoh(8, 2)NEWLINE sage: II.is_isomorphic(digraphs.DeBruijn(2, 3), certificate = True)NEWLINE (True, {0: '010', 1: '011', 2: '000', 3: '001', 4: '110', 5: '111', 6: '100', 7: '101'})NEWLINENEWLINE sage: II = digraphs.ImaseItoh(12, 2)NEWLINE sage: II.is_isomorphic(digraphs.Kautz(2, 3), certificate = True)NEWLINE (True, {0: '010', 1: '012', 2: '021', 3: '020', 4: '202', 5: '201', 6: '210', 7: '212', 8: '121', 9: '120', 10: '102', 11: '101'})NEWLINENEWLINENEWLINE TESTS:NEWLINENEWLINE An exception is raised when the degree is less than one::NEWLINENEWLINE sage: G = digraphs.ImaseItoh(2, 0)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The digraph of Imase and Itoh is defined for degree at least one.NEWLINENEWLINE An exception is raised when the order of the graph is less than two::NEWLINENEWLINE sage: G = digraphs.ImaseItoh(1, 2)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The digraph of Imase and Itoh is defined for at least two vertices.NEWLINENEWLINENEWLINE REFERENCE:NEWLINENEWLINE .. [II83] \M. Imase and M. Itoh. A design for directed graphs withNEWLINE minimum diameter, *IEEE Trans. Comput.*, vol. C-32, pp. 
782-784, 1983.NEWLINE """NEWLINE if n < 2:NEWLINE raise ValueError("The digraph of Imase and Itoh is defined for at least two vertices.")NEWLINE if d < 1:NEWLINE raise ValueError("The digraph of Imase and Itoh is defined for degree at least one.")NEWLINENEWLINE II = DiGraph(loops = True)NEWLINE II.allow_multiple_edges(True)NEWLINE for u in range(n):NEWLINE for a in range(-u*d-d, -u*d):NEWLINE II.add_edge(u, a % n)NEWLINENEWLINE II.name( "Imase and Itoh digraph (n=%s, d=%s)"%(n,d) )NEWLINE return IINEWLINENEWLINENEWLINE def Kautz(self, k, D, vertices = 'strings'):NEWLINE r"""NEWLINE Returns the Kautz digraph of degree `d` and diameter `D`.NEWLINENEWLINE The Kautz digraph has been defined in [Kautz68]_. The Kautz digraph ofNEWLINE degree `d` and diameter `D` has `d^{D-1}(d+1)` vertices. This digraph isNEWLINE build upon a set of vertices equal to the set of words of length `D`NEWLINE from an alphabet of `d+1` letters such that consecutive letters areNEWLINE differents. There is an arc from vertex `u` to vertex `v` if `v` can beNEWLINE obtained from `u` by removing the leftmost letter and adding a newNEWLINE letter, distinct from the rightmost letter of `u`, at the right end.NEWLINENEWLINE The Kautz digraph of degree `d` and diameter `D` is isomorphic to theNEWLINE digraph of Imase and Itoh [II83]_ of degree `d` and orderNEWLINE `d^{D-1}(d+1)`.NEWLINENEWLINE See the :wikipedia:`Kautz_graph` for more information.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``k`` -- Two possibilities for this parameter :NEWLINE - An integer equal to the degree of the digraph to be produced, thatNEWLINE is the cardinality minus one of the alphabet to use.NEWLINE - An iterable object to be used as the set of letters. 
The degree ofNEWLINE the resulting digraph is the cardinality of the set of lettersNEWLINE minus one.NEWLINENEWLINE - ``D`` -- An integer equal to the diameter of the digraph, and also toNEWLINE the length of a vertex label when ``vertices == 'strings'``.NEWLINENEWLINE - ``vertices`` -- 'strings' (default) or 'integers', specifying whetherNEWLINE the vertices are words build upon an alphabet or integers.NEWLINENEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: K = digraphs.Kautz(2, 3)NEWLINE sage: K.is_isomorphic(digraphs.ImaseItoh(12, 2), certificate = True)NEWLINE (True,NEWLINE {'010': 0,NEWLINE '012': 1,NEWLINE '020': 3,NEWLINE '021': 2,NEWLINE '101': 11,NEWLINE '102': 10,NEWLINE '120': 9,NEWLINE '121': 8,NEWLINE '201': 5,NEWLINE '202': 4,NEWLINE '210': 6,NEWLINE '212': 7})NEWLINENEWLINE sage: K = digraphs.Kautz([1,'a','B'], 2)NEWLINE sage: K.edges()NEWLINE [('1B', 'B1', '1'), ('1B', 'Ba', 'a'), ('1a', 'a1', '1'), ('1a', 'aB', 'B'), ('B1', '1B', 'B'), ('B1', '1a', 'a'), ('Ba', 'a1', '1'), ('Ba', 'aB', 'B'), ('a1', '1B', 'B'), ('a1', '1a', 'a'), ('aB', 'B1', '1'), ('aB', 'Ba', 'a')]NEWLINENEWLINE sage: K = digraphs.Kautz([1,'aA','BB'], 2)NEWLINE sage: K.edges()NEWLINE [('1,BB', 'BB,1', '1'), ('1,BB', 'BB,aA', 'aA'), ('1,aA', 'aA,1', '1'), ('1,aA', 'aA,BB', 'BB'), ('BB,1', '1,BB', 'BB'), ('BB,1', '1,aA', 'aA'), ('BB,aA', 'aA,1', '1'), ('BB,aA', 'aA,BB', 'BB'), ('aA,1', '1,BB', 'BB'), ('aA,1', '1,aA', 'aA'), ('aA,BB', 'BB,1', '1'), ('aA,BB', 'BB,aA', 'aA')]NEWLINENEWLINENEWLINE TESTS:NEWLINENEWLINE An exception is raised when the degree is less than one::NEWLINENEWLINE sage: G = digraphs.Kautz(0, 2)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: Kautz digraphs are defined for degree at least oneNEWLINENEWLINE sage: G = digraphs.Kautz(['a'], 2)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: Kautz digraphs are defined for degree at least oneNEWLINENEWLINE An exception is raised when the diameter of the graph is less than 
one::NEWLINENEWLINE sage: G = digraphs.Kautz(2, 0)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: Kautz digraphs are defined for diameter at least oneNEWLINENEWLINE :trac:`22355`::NEWLINENEWLINE sage: K = digraphs.Kautz(2, 2, vertices='strings')NEWLINE sage: K.vertices()NEWLINE ['01', '02', '10', '12', '20', '21']NEWLINE sage: h = digraphs.Kautz(2, 2, vertices='integers')NEWLINE sage: h.vertices()NEWLINE [0, 1, 2, 3, 4, 5]NEWLINE sage: h.is_isomorphic(K)NEWLINE TrueNEWLINE sage: h = digraphs.Kautz([1,'aA','BB'], 2, vertices='integers')NEWLINE sage: h.is_isomorphic(K)NEWLINE TrueNEWLINE sage: h.vertices()NEWLINE [0, 1, 2, 3, 4, 5]NEWLINE sage: digraphs.Kautz(2, 2, vertices='circles')NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: unknown type for verticesNEWLINENEWLINE REFERENCE:NEWLINENEWLINE .. [Kautz68] \W. H. Kautz. Bounds on directed (d, k) graphs. Theory ofNEWLINE cellular logic networks and machines, AFCRL-68-0668, SRI Project 7258,NEWLINE Final Rep., pp. 
20-28, 1968.NEWLINE """NEWLINE if D < 1:NEWLINE raise ValueError("Kautz digraphs are defined for diameter at least one")NEWLINENEWLINE from sage.combinat.words.words import WordsNEWLINE from sage.rings.integer import IntegerNEWLINENEWLINE if vertices == 'strings':NEWLINENEWLINE my_alphabet = Words([str(i) for i in range(k+1)] if isinstance(k, Integer) else k, 1)NEWLINE if my_alphabet.alphabet().cardinality() < 2:NEWLINE raise ValueError("Kautz digraphs are defined for degree at least one")NEWLINENEWLINE # We start building the set of verticesNEWLINE V = [i for i in my_alphabet]NEWLINE for i in range(D-1):NEWLINE VV = []NEWLINE for w in V:NEWLINE VV += [w*a for a in my_alphabet if not w.has_suffix(a) ]NEWLINE V = VVNEWLINENEWLINE # We now build the set of arcsNEWLINE G = DiGraph()NEWLINE for u in V:NEWLINE for a in my_alphabet:NEWLINE if not u.has_suffix(a):NEWLINE G.add_edge(u.string_rep(), (u[1:]*a).string_rep(), a.string_rep())NEWLINENEWLINE elif vertices == 'integers':NEWLINE d = k if isinstance(k, Integer) else (len(list(k))-1)NEWLINE if d < 1:NEWLINE raise ValueError("Kautz digraphs are defined for degree at least one")NEWLINE G = digraphs.ImaseItoh( (d+1)*(d**(D-1)), d)NEWLINENEWLINE else:NEWLINE raise ValueError('unknown type for vertices')NEWLINENEWLINE G.name( "Kautz digraph (k={}, D={})".format(k, D) )NEWLINE return GNEWLINENEWLINE def RandomDirectedGN(self, n, kernel=lambda x:x, seed=None):NEWLINE """NEWLINE Returns a random GN (growing network) digraph with n vertices.NEWLINENEWLINE The digraph is constructed by adding vertices with a link to oneNEWLINE previously added vertex. The vertex to link to is chosen with aNEWLINE preferential attachment model, i.e. probability is proportional toNEWLINE degree. The default attachment kernel is a linear function ofNEWLINE degree. 
The digraph is always a tree, so in particular it is aNEWLINE directed acyclic graph.NEWLINENEWLINE INPUT:NEWLINENEWLINENEWLINE - ``n`` - number of vertices.NEWLINENEWLINE - ``kernel`` - the attachment kernelNEWLINENEWLINE - ``seed`` - for the random number generatorNEWLINENEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: D = digraphs.RandomDirectedGN(25)NEWLINE sage: D.edges(labels=False)NEWLINE [(1, 0), (2, 0), (3, 1), (4, 0), (5, 0), (6, 1), (7, 0), (8, 3), (9, 0), (10, 8), (11, 3), (12, 9), (13, 8), (14, 0), (15, 11), (16, 11), (17, 5), (18, 11), (19, 6), (20, 5), (21, 14), (22, 5), (23, 18), (24, 11)]NEWLINE sage: D.show() # long timeNEWLINENEWLINE REFERENCE:NEWLINENEWLINE - [1] Krapivsky, P.L. and Redner, S. Organization of GrowingNEWLINE Random Networks, Phys. Rev. E vol. 63 (2001), p. 066123.NEWLINE """NEWLINE if seed is None:NEWLINE seed = current_randstate().long_seed()NEWLINE import networkxNEWLINE return DiGraph(networkx.gn_graph(n, kernel, seed=seed))NEWLINENEWLINE def RandomDirectedGNC(self, n, seed=None):NEWLINE """NEWLINE Returns a random GNC (growing network with copying) digraph with nNEWLINE vertices.NEWLINENEWLINE The digraph is constructed by adding vertices with a link to oneNEWLINE previously added vertex. The vertex to link to is chosen with aNEWLINE preferential attachment model, i.e. probability is proportional toNEWLINE degree. 
The new vertex is also linked to all of the previouslyNEWLINE added vertex's successors.NEWLINENEWLINE INPUT:NEWLINENEWLINENEWLINE - ``n`` - number of vertices.NEWLINENEWLINE - ``seed`` - for the random number generatorNEWLINENEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: D = digraphs.RandomDirectedGNC(25)NEWLINE sage: D.edges(labels=False)NEWLINE [(1, 0), (2, 0), (2, 1), (3, 0), (4, 0), (4, 1), (5, 0), (5, 1), (5, 2), (6, 0), (6, 1), (7, 0), (7, 1), (7, 4), (8, 0), (9, 0), (9, 8), (10, 0), (10, 1), (10, 2), (10, 5), (11, 0), (11, 8), (11, 9), (12, 0), (12, 8), (12, 9), (13, 0), (13, 1), (14, 0), (14, 8), (14, 9), (14, 12), (15, 0), (15, 8), (15, 9), (15, 12), (16, 0), (16, 1), (16, 4), (16, 7), (17, 0), (17, 8), (17, 9), (17, 12), (18, 0), (18, 8), (19, 0), (19, 1), (19, 4), (19, 7), (20, 0), (20, 1), (20, 4), (20, 7), (20, 16), (21, 0), (21, 8), (22, 0), (22, 1), (22, 4), (22, 7), (22, 19), (23, 0), (23, 8), (23, 9), (23, 12), (23, 14), (24, 0), (24, 8), (24, 9), (24, 12), (24, 15)]NEWLINE sage: D.show() # long timeNEWLINENEWLINE REFERENCE:NEWLINENEWLINE - [1] Krapivsky, P.L. and Redner, S. Network Growth byNEWLINE Copying, Phys. Rev. E vol. 71 (2005), p. 036118.NEWLINE """NEWLINE if seed is None:NEWLINE seed = current_randstate().long_seed()NEWLINE import networkxNEWLINE return DiGraph(networkx.gnc_graph(n, seed=seed))NEWLINENEWLINE def RandomDirectedGNP(self, n, p, loops = False, seed = None):NEWLINE r"""NEWLINE Returns a random digraph on `n` nodes. Each edge is insertedNEWLINE independently with probability `p`.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` -- number of nodes of the digraphNEWLINENEWLINE - ``p`` -- probability of an edgeNEWLINENEWLINE - ``loops`` -- is a boolean set to True if the random digraph may haveNEWLINE loops, and False (default) otherwise.NEWLINENEWLINE - ``seed`` -- integer seed for random number generator (default=None).NEWLINENEWLINE REFERENCES:NEWLINENEWLINE .. [1] \P. Erdos and A. Renyi, On Random Graphs, Publ. Math. 
6, 290NEWLINE (1959).NEWLINENEWLINE .. [2] \E. N. Gilbert, Random Graphs, Ann. Math. Stat., 30, 1141 (1959).NEWLINENEWLINENEWLINE PLOTTING: When plotting, this graph will use the default spring-layoutNEWLINE algorithm, unless a position dictionary is specified.NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: set_random_seed(0)NEWLINE sage: D = digraphs.RandomDirectedGNP(10, .2)NEWLINE sage: D.num_verts()NEWLINE 10NEWLINE sage: D.edges(labels=False)NEWLINE [(1, 0), (1, 2), (3, 6), (3, 7), (4, 5), (4, 7), (4, 8), (5, 2), (6, 0), (7, 2), (8, 1), (8, 9), (9, 4)]NEWLINE """NEWLINE from sage.graphs.graph_generators_pyx import RandomGNPNEWLINE if 0.0 > p or 1.0 < p:NEWLINE raise ValueError("The probability p must be in [0..1].")NEWLINENEWLINE if seed is None:NEWLINE seed = current_randstate().long_seed()NEWLINENEWLINE return RandomGNP(n, p, directed = True, loops = loops)NEWLINENEWLINE def RandomDirectedGNM(self, n, m, loops = False):NEWLINE r"""NEWLINE Returns a random labelled digraph on `n` nodes and `m` arcs.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` (integer) -- number of vertices.NEWLINENEWLINE - ``m`` (integer) -- number of edges.NEWLINENEWLINE - ``loops`` (boolean) -- whether to allow loops (set to ``False`` byNEWLINE default).NEWLINENEWLINE PLOTTING: When plotting, this graph will use the default spring-layoutNEWLINE algorithm, unless a position dictionary is specified.NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: D = digraphs.RandomDirectedGNM(10, 5)NEWLINE sage: D.num_verts()NEWLINE 10NEWLINE sage: D.edges(labels=False)NEWLINE [(0, 3), (1, 5), (5, 1), (7, 0), (8, 5)]NEWLINENEWLINE With loops::NEWLINENEWLINE sage: D = digraphs.RandomDirectedGNM(10, 100, loops = True)NEWLINE sage: D.num_verts()NEWLINE 10NEWLINE sage: D.loops()NEWLINE [(0, 0, None), (1, 1, None), (2, 2, None), (3, 3, None), (4, 4, None), (5, 5, None), (6, 6, None), (7, 7, None), (8, 8, None), (9, 9, None)]NEWLINENEWLINE TESTS::NEWLINENEWLINE sage: digraphs.RandomDirectedGNM(10,-3)NEWLINE Traceback 
(most recent call last):NEWLINE ...NEWLINE ValueError: The number of edges must satisfy 0<= m <= n(n-1) when no loops are allowed, and 0<= m <= n^2 otherwise.NEWLINENEWLINE sage: digraphs.RandomDirectedGNM(10,100)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The number of edges must satisfy 0<= m <= n(n-1) when no loops are allowed, and 0<= m <= n^2 otherwise.NEWLINE """NEWLINE n, m = int(n), int(m)NEWLINENEWLINE # The random graph is built by drawing randomly and uniformly twoNEWLINE # integers u,v, and adding the corresponding edge if it does not exist,NEWLINE # as many times as necessary.NEWLINENEWLINE # When the graph is dense, we actually compute its complement. This willNEWLINE # prevent us from drawing the same pair u,v too many times.NEWLINENEWLINE from sage.misc.prandom import _pyrandNEWLINE rand = _pyrand()NEWLINE D = DiGraph(n, loops = loops)NEWLINENEWLINE # Ensuring the parameters n,m make sense.NEWLINE #NEWLINE # If the graph is dense, we actually want to build its complement. WeNEWLINE # update m accordingly.NEWLINENEWLINE good_input = TrueNEWLINE is_dense = FalseNEWLINENEWLINE if m < 0:NEWLINE good_input = FalseNEWLINENEWLINE if loops:NEWLINE if m > n*n:NEWLINE good_input = FalseNEWLINE elif 2*m > n*n:NEWLINE is_dense = TrueNEWLINE m = n*n - mNEWLINENEWLINE else:NEWLINE if m > n*(n-1):NEWLINE good_input = FalseNEWLINE elif m > (n * (n - 1)) // 2:NEWLINE is_dense = TrueNEWLINE m = n*(n-1) - mNEWLINENEWLINE if not good_input:NEWLINE raise ValueError("The number of edges must satisfy 0<= m <= n(n-1) when no loops are allowed, and 0<= m <= n^2 otherwise.")NEWLINENEWLINE # When the given number of edges defines a density larger than 1/2, itNEWLINE # should be faster to compute the complement of the graph (less edges toNEWLINE # generate), then to return its complement. 
This being said, theNEWLINE # .complement() method for sparse graphs is very slow at the moment.NEWLINENEWLINE # Similarly, it is faster to test whether a pair belongs to a dictionaryNEWLINE # than to test the adjacency of two vertices in a graph. For theseNEWLINE # reasons, the following code mainly works on dictionaries.NEWLINENEWLINE adj = dict( (i, dict()) for i in range(n) )NEWLINENEWLINE # We fill the dictionary structure, but add the corresponding edge inNEWLINE # the graph only if is_dense is False. If it is true, we will add theNEWLINE # edges in a second phase.NEWLINENEWLINENEWLINE while m > 0:NEWLINENEWLINE # It is better to obtain random numbers this way than by calling theNEWLINE # randint or randrange method. This, because they are very expensiveNEWLINE # when trying to compute MANY random integers, and because theNEWLINE # following lines is precisely what they do anyway, after checkingNEWLINE # their parameters are correct.NEWLINENEWLINE u=int(rand.random()*n)NEWLINE v=int(rand.random()*n)NEWLINENEWLINE if (u != v or loops) and (not v in adj[u]):NEWLINE adj[u][v] = 1NEWLINE m -= 1NEWLINE if not is_dense:NEWLINE D.add_edge(u,v)NEWLINENEWLINE # If is_dense is True, it means the graph has not been built. We fill DNEWLINE # with the complement of the edges stored in the adj dictionaryNEWLINENEWLINE if is_dense:NEWLINE for u in range(n):NEWLINE for v in range(n):NEWLINE if ((u != v) or loops) and (not (v in adj[u])):NEWLINE D.add_edge(u,v)NEWLINENEWLINE return DNEWLINENEWLINE def RandomDirectedGNR(self, n, p, seed=None):NEWLINE """NEWLINE Returns a random GNR (growing network with redirection) digraphNEWLINE with n vertices and redirection probability p.NEWLINENEWLINE The digraph is constructed by adding vertices with a link to oneNEWLINE previously added vertex. The vertex to link to is chosen uniformly.NEWLINE With probability p, the arc is instead redirected to the successorNEWLINE vertex. 
The digraph is always a tree.NEWLINENEWLINE INPUT:NEWLINENEWLINENEWLINE - ``n`` - number of vertices.NEWLINENEWLINE - ``p`` - redirection probabilityNEWLINENEWLINE - ``seed`` - for the random number generator.NEWLINENEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: D = digraphs.RandomDirectedGNR(25, .2)NEWLINE sage: D.edges(labels=False)NEWLINE [(1, 0), (2, 0), (2, 1), (3, 0), (4, 0), (4, 1), (5, 0), (5, 1), (5, 2), (6, 0), (6, 1), (7, 0), (7, 1), (7, 4), (8, 0), (9, 0), (9, 8), (10, 0), (10, 1), (10, 2), (10, 5), (11, 0), (11, 8), (11, 9), (12, 0), (12, 8), (12, 9), (13, 0), (13, 1), (14, 0), (14, 8), (14, 9), (14, 12), (15, 0), (15, 8), (15, 9), (15, 12), (16, 0), (16, 1), (16, 4), (16, 7), (17, 0), (17, 8), (17, 9), (17, 12), (18, 0), (18, 8), (19, 0), (19, 1), (19, 4), (19, 7), (20, 0), (20, 1), (20, 4), (20, 7), (20, 16), (21, 0), (21, 8), (22, 0), (22, 1), (22, 4), (22, 7), (22, 19), (23, 0), (23, 8), (23, 9), (23, 12), (23, 14), (24, 0), (24, 8), (24, 9), (24, 12), (24, 15)]NEWLINE sage: D.show() # long timeNEWLINENEWLINE REFERENCE:NEWLINENEWLINE - [1] Krapivsky, P.L. and Redner, S. Organization of GrowingNEWLINE Random Networks, Phys. Rev. E vol. 63 (2001), p. 066123.NEWLINE """NEWLINE if seed is None:NEWLINE seed = current_randstate().long_seed()NEWLINE import networkxNEWLINE return DiGraph(networkx.gnc_graph(n, seed=seed))NEWLINENEWLINE def RandomSemiComplete(self, n):NEWLINE r"""NEWLINE Return a random semi-complete digraph on `n` vertices.NEWLINENEWLINE A directed graph `G=(V,E)` is *semi-complete* if for any pair ofNEWLINE vertices `u` and `v`, there is *at least* one arc between them.NEWLINENEWLINE To generate randomly a semi-complete digraph, we have to ensure, for anyNEWLINE pair of distinct vertices `u` and `v`, that with probability `1/3` weNEWLINE have only arc `uv`, with probability `1/3` we have only arc `vu`, andNEWLINE with probability `1/3` we have both arc `uv` and arc `vu`. We do so byNEWLINE selecting a random integer `coin` in `[1,3]`. 
When `coin==1` we selectNEWLINE only arc `uv`, when `coin==3` we select only arc `vu`, and whenNEWLINE `coin==2` we select both arcs. In other words, we select arc `uv` whenNEWLINE `coin\leq 2` and arc `vu` when `coin\geq 2`.NEWLINENEWLINE INPUT:NEWLINENEWLINE - ``n`` (integer) -- the number of nodesNEWLINENEWLINE .. SEEALSO::NEWLINENEWLINE - :meth:`~sage.graphs.digraph_generators.DiGraphGenerators.Complete`NEWLINENEWLINE - :meth:`~sage.graphs.digraph_generators.DiGraphGenerators.RandomTournament`NEWLINENEWLINE EXAMPLES::NEWLINENEWLINE sage: SC = digraphs.RandomSemiComplete(10); SCNEWLINE Random Semi-Complete digraph: Digraph on 10 verticesNEWLINE sage: SC.size() >= binomial(10, 2)NEWLINE TrueNEWLINE sage: digraphs.RandomSemiComplete(-1)NEWLINE Traceback (most recent call last):NEWLINE ...NEWLINE ValueError: The number of vertices cannot be strictly negative!NEWLINE """NEWLINE G = DiGraph(n, name="Random Semi-Complete digraph")NEWLINENEWLINE # For each pair u,v we choose a randon number ``coin`` in [1,3].NEWLINE # We select edge `(u,v)` if `coin==1` or `coin==2`.NEWLINE # We select edge `(v,u)` if `coin==2` or `coin==3`.NEWLINE import itertoolsNEWLINE from sage.misc.prandom import randintNEWLINE for u,v in itertools.combinations(range(n), 2):NEWLINE coin = randint(1,3)NEWLINE if coin<=2:NEWLINE G.add_edge(u,v)NEWLINE if coin>=2:NEWLINE G.add_edge(v,u)NEWLINENEWLINE G._circle_embedding(list(range(n)))NEWLINENEWLINE return GNEWLINENEWLINE################################################################################NEWLINE# DiGraph IteratorsNEWLINE################################################################################NEWLINENEWLINE def __call__(self, vertices=None, property=lambda x: True, augment='edges',NEWLINE size=None, implementation='c_graph', sparse=True, copy=True):NEWLINE """NEWLINE Accesses the generator of isomorphism class representatives.NEWLINE Iterates over distinct, exhaustive representatives.NEWLINENEWLINE INPUT:NEWLINENEWLINE - 
``vertices`` - natural number or ``None`` to generate all digraphsNEWLINENEWLINE - ``property`` - any property to be tested on digraphsNEWLINE before generation.NEWLINENEWLINE - ``augment`` - choices:NEWLINENEWLINE - ``'vertices'`` - augments by adding a vertex, andNEWLINE edges incident to that vertex. In this case, all digraphs on up toNEWLINE n=vertices are generated. If for any digraph G satisfying theNEWLINE property, every subgraph, obtained from G by deleting one vertexNEWLINE and only edges incident to that vertex, satisfies the property,NEWLINE then this will generate all digraphs with that property. If thisNEWLINE does not hold, then all the digraphs generated will satisfy theNEWLINE property, but there will be some missing.NEWLINENEWLINE - ``'edges'`` - augments a fixed number of vertices byNEWLINE adding one edge In this case, all digraphs on exactly n=verticesNEWLINE are generated. If for any graph G satisfying the property, everyNEWLINE subgraph, obtained from G by deleting one edge but not the verticesNEWLINE incident to that edge, satisfies the property, then this willNEWLINE generate all digraphs with that property. If this does not hold,NEWLINE then all the digraphs generated will satisfy the property, butNEWLINE there will be some missing.NEWLINENEWLINE - ``implementation`` - which underlying implementation to use (see DiGraph?)NEWLINENEWLINE - ``sparse`` - ignored if implementation is not ``c_graph``NEWLINENEWLINE - ``copy`` (boolean) -- If set to ``True`` (default)NEWLINE this method makes copies of the digraphs before returningNEWLINE them. If set to ``False`` the method returns the digraph itNEWLINE is working on. 
The second alternative is faster, but modifyingNEWLINE any of the digraph instances returned by the method may breakNEWLINE the function's behaviour, as it is using these digraphs toNEWLINE compute the next ones: only use ``copy = False`` whenNEWLINE you stick to *reading* the digraphs returned.NEWLINENEWLINE EXAMPLES: Print digraphs on 2 or less vertices.NEWLINENEWLINE ::NEWLINENEWLINE sage: for D in digraphs(2, augment='vertices'):NEWLINE ....: print(D)NEWLINE Digraph on 0 verticesNEWLINE Digraph on 1 vertexNEWLINE Digraph on 2 verticesNEWLINE Digraph on 2 verticesNEWLINE Digraph on 2 verticesNEWLINENEWLINE Print digraphs on 3 vertices.NEWLINENEWLINE ::NEWLINENEWLINE sage: for D in digraphs(3):NEWLINE ....: print(D)NEWLINE Digraph on 3 verticesNEWLINE Digraph on 3 verticesNEWLINE ...NEWLINE Digraph on 3 verticesNEWLINE Digraph on 3 verticesNEWLINENEWLINE For more examples, see the class level documentation, or typeNEWLINENEWLINE ::NEWLINENEWLINE sage: digraphs? # not testedNEWLINENEWLINE REFERENCE:NEWLINENEWLINE - Brendan D. 
McKay, Isomorph-Free Exhaustive generation.NEWLINE Journal of Algorithms Volume 26, Issue 2, February 1998,NEWLINE pages 306-324.NEWLINE """NEWLINE from copy import copy as copyfunNEWLINE if size is not None:NEWLINE extra_property = lambda x: x.size() == sizeNEWLINE else:NEWLINE extra_property = lambda x: TrueNEWLINE if augment == 'vertices':NEWLINE if vertices is None:NEWLINE raise NotImplementedErrorNEWLINENEWLINE from sage.graphs.graph_generators import canaug_traverse_vertNEWLINE g = DiGraph(implementation=implementation, sparse=sparse)NEWLINE for gg in canaug_traverse_vert(g, [], vertices, property, dig=True, implementation=implementation, sparse=sparse):NEWLINE if extra_property(gg):NEWLINE yield copyfun(gg) if copy else ggNEWLINENEWLINE elif augment == 'edges':NEWLINENEWLINE if vertices is None:NEWLINE vertices = 0NEWLINE while True:NEWLINE for g in self(vertices, implementation=implementation, sparse=sparse, copy=copy):NEWLINE yield gNEWLINE vertices += 1NEWLINENEWLINE from sage.graphs.graph_generators import canaug_traverse_edgeNEWLINE g = DiGraph(vertices, implementation=implementation, sparse=sparse)NEWLINE gens = []NEWLINE for i in range(vertices-1):NEWLINE gen = list(range(i))NEWLINE gen.append(i+1); gen.append(i)NEWLINE gen += list(range(i + 2, vertices))NEWLINE gens.append(gen)NEWLINE for gg in canaug_traverse_edge(g, gens, property, dig=True, implementation=implementation, sparse=sparse):NEWLINE if extra_property(gg):NEWLINE yield copyfun(gg) if copy else ggNEWLINE else:NEWLINE raise NotImplementedError()NEWLINENEWLINENEWLINE# Easy access to the graph generators from the command line:NEWLINEdigraphs = DiGraphGenerators()NEWLINENEWLINENEWLINENEWLINENEWLINE
def even_squares(limit):
    """Return the output lines 'i^2 = i*i' for every even i in 1..limit.

    Pure helper extracted from the original inline script so the logic is
    importable and testable; iterates the even numbers directly instead of
    testing ``i % 2`` on every value.

    :param limit: inclusive upper bound ``N`` (non-positive -> empty list)
    :return: list of strings, one per even number
    """
    # range(2, limit + 1, 2) visits exactly the even i in 1..limit.
    # f-string reproduces the original print('%d^2' % i, '=', i ** 2)
    # output byte-for-byte: e.g. "4^2 = 16".
    return [f"{i}^2 = {i ** 2}" for i in range(2, limit + 1, 2)]


if __name__ == "__main__":
    # Same CLI behaviour as the original script: read N from stdin,
    # print one line per even number up to N.
    N = int(input(''))
    for line in even_squares(N):
        print(line)
from django.db import models


class PersistentQuery(models.Model):
    """A named, stored SQL query with optional search/insert/update/delete
    statements and before-* service hooks.

    ``query_id`` is the primary key; rows are addressed by this slug.
    """
    name = models.CharField("Query name", max_length=80)
    # NOTE(review): db_index=True is redundant with primary_key=True (PKs are
    # always indexed); kept as-is to avoid a no-op migration.
    query_id = models.SlugField("Query ID", max_length=80, primary_key=True, blank=True, db_index=True)
    insert_pk = models.CharField("Insert PK", blank=True, null=True, max_length=255, help_text="Inform insert table pk field, qualified with table and schema, to allow retrieving last inserted row")
    query_pk = models.CharField("Query PK", blank=True, null=True, max_length=255, help_text="Inform query pk qualified field to allow retrieving last inserted row")
    conn_name = models.CharField("Connection name", blank=True, null=True, max_length=255, help_text="Specify connection name (blank = query_db)")
    description = models.CharField(max_length=255, blank=True, null=True)
    sql_query = models.TextField("SQL base query", blank=True, null=True)
    sql_search_where = models.TextField("SQL search where", blank=True, null=True)
    sql_insert = models.TextField("SQL Insert", blank=True, null=True)
    before_insert = models.CharField("Before insert", max_length=255, blank=True, null=True, help_text="Method in a service to be called before record is inserted (service/method)")
    sql_update = models.TextField("SQL Update", blank=True, null=True)
    # Fixed copy-paste bug: verbose_name was "Before insert".
    before_update = models.CharField("Before update", max_length=255, blank=True, null=True, help_text="Method in a service to be called before record is updated (service/method)")
    sql_delete = models.TextField("SQL Delete", blank=True, null=True)
    # Fixed copy-paste bug: verbose_name was "Before insert".
    before_delete = models.CharField("Before delete", max_length=255, blank=True, null=True, help_text="Method in a service to be called before record is deleted (service/method)")

    def __str__(self):
        # The slug PK is the most stable human-readable identifier.
        return self.query_id

    class Meta:
        verbose_name = "Persistent query"
        verbose_name_plural = "Persistent queries"


class PersistentNestedQuery(models.Model):
    """Link table nesting one :class:`PersistentQuery` inside another.

    The child query's result set is attached to the parent's result under
    ``attr_name``, joined via ``related_field`` against the parent PK.
    """
    parent = models.ForeignKey(PersistentQuery, on_delete=models.CASCADE, verbose_name="Parent query", related_name="nested_query")
    child = models.ForeignKey(PersistentQuery, on_delete=models.CASCADE, verbose_name="Child query", related_name="parent_query")
    attr_name = models.CharField("Attribute name", max_length=100, help_text="Name of attribute that will receive nested result")
    related_field = models.CharField("Related field", max_length=100, help_text="Name of field related to parent PK")
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEfrom __future__ import print_functionNEWLINEimport copyNEWLINEimport warningsNEWLINEimport paddleNEWLINEimport osNEWLINEimport numpy as npNEWLINEfrom paddle.fluid.framework import dygraph_onlyNEWLINEfrom paddle.fluid import compilerNEWLINEfrom .role_maker import UserDefinedRoleMaker, PaddleCloudRoleMaker, RoleMakerBaseNEWLINEfrom .strategy_compiler import StrategyCompilerNEWLINEfrom .distributed_strategy import DistributedStrategyNEWLINEfrom .meta_optimizer_factory import MetaOptimizerFactoryNEWLINEfrom .runtime_factory import RuntimeFactoryNEWLINEfrom paddle.fluid.wrapped_decorator import wrap_decoratorNEWLINEfrom paddle.fluid.dygraph import parallel_helperNEWLINEfrom . 
import topology as tpNEWLINEfrom .topology import ParallelModeNEWLINEfrom ..meta_parallel import TensorParallel, model_parallel_random_seedNEWLINEfrom ..meta_parallel import PipelineParallel, ShardingParallelNEWLINEfrom ..meta_optimizers import HybridParallelOptimizerNEWLINEfrom ..meta_optimizers import HybridParallelGradScalerNEWLINENEWLINE__all__ = []NEWLINENEWLINENEWLINEdef _inited_runtime_handler_(func):NEWLINE def __impl__(*args, **kwargs):NEWLINE cls = args[0]NEWLINENEWLINE if cls._runtime_handle is None:NEWLINE raise ValueError("Fleet can not find suitable runtime handler")NEWLINENEWLINE return func(*args, **kwargs)NEWLINENEWLINE return __impl__NEWLINENEWLINENEWLINEdef _is_non_distributed_check_(func):NEWLINE def __impl__(*args, **kwargs):NEWLINE cls = args[0]NEWLINENEWLINE if cls._role_maker is not None and cls._role_maker._is_non_distributed(NEWLINE ) is True:NEWLINE warnings.warn(NEWLINE "%s() function doesn't work when use non_distributed fleet." %NEWLINE (func.__name__))NEWLINE returnNEWLINENEWLINE return func(*args, **kwargs)NEWLINENEWLINE return __impl__NEWLINENEWLINENEWLINEinited_runtime_handler = wrap_decorator(_inited_runtime_handler_)NEWLINEis_non_distributed_check = wrap_decorator(_is_non_distributed_check_)NEWLINENEWLINENEWLINEclass Fleet(object):NEWLINE """NEWLINE Unified API for distributed training of PaddlePaddleNEWLINE Please reference the https://github.com/PaddlePaddle/FleetX for detailsNEWLINENEWLINENEWLINE Returns:NEWLINE Fleet: A Fleet instanceNEWLINENEWLINE Example for collective training:NEWLINENEWLINE .. 
code-block:: pythonNEWLINENEWLINE import paddleNEWLINE paddle.enable_static()NEWLINE import paddle.distributed.fleet as fleetNEWLINENEWLINE fleet.init(is_collective=True)NEWLINENEWLINE strategy = fleet.DistributedStrategy()NEWLINE optimizer = paddle.optimizer.SGD(learning_rate=0.001)NEWLINE optimizer = fleet.distributed_optimizer(optimizer, strategy=strategy)NEWLINENEWLINE # do distributed trainingNEWLINENEWLINENEWLINE Example for parameter server training:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddleNEWLINE paddle.enable_static()NEWLINE import paddle.distributed.fleet as fleetNEWLINE strategy = fleet.DistributedStrategy()NEWLINE fleet.init(strategy=strategy)NEWLINENEWLINE optimizer = paddle.optimizer.SGD(learning_rate=0.001)NEWLINE optimizer = fleet.distributed_optimizer(optimizer)NEWLINENEWLINE if fleet.is_first_worker():NEWLINE print("this is first worker")NEWLINENEWLINE print("current node index: {}".format(fleet.worker_index()))NEWLINE print("total number of worker num: {}".format(fleet.worker_num()))NEWLINENEWLINE if fleet.is_worker():NEWLINE print("this is worker")NEWLINE print("worker endpoints: {}".format(fleet.worker_endpoints(to_string=True)))NEWLINENEWLINE print("server num: {}".format(fleet.server_num()))NEWLINE print("server endpoints: {}".format(fleet.server_endpoints(to_string=True)))NEWLINENEWLINE if fleet.is_server():NEWLINE print("this is server")NEWLINE fleet.stop_worker()NEWLINENEWLINENEWLINE """NEWLINENEWLINE def __init__(self):NEWLINE self._role_maker = NoneNEWLINE self.strategy_compiler = NoneNEWLINE self._is_collective = FalseNEWLINE self._runtime_handle = NoneNEWLINE self._util = NoneNEWLINE self._context = {}NEWLINENEWLINE def init(self, role_maker=None, is_collective=False, strategy=None):NEWLINE """NEWLINE Initialize role_maker in Fleet.NEWLINENEWLINE This function is responsible for the distributed architectureNEWLINE what you want to run your code behind.NEWLINENEWLINE Args:NEWLINE role_maker (RoleMakerBase, 
optional): A ``RoleMakerBase`` containing the configurationNEWLINE of environment variables related to distributed training.If you did not initialize NEWLINE the rolemaker by yourself, it will be automatically initialized to PaddleRoleMaker.NEWLINE The default value is None.NEWLINE is_collective (Boolean, optional): A ``Boolean`` variable determines whether the program NEWLINE runs on the CPU or GPU. False means set distributed training using CPU, and True meansNEWLINE GPU.The default value is False.The default value is False.NEWLINE strategy (DistributedStrategy): Extra properties for distributed training. NEWLINE For details, please refer to paddle.distributed.fleet.DistributedStrategy. Default: None.NEWLINENEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples1:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINENEWLINE Examples2:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init(is_collective=True)NEWLINENEWLINE Examples3:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE role = fleet.PaddleCloudRoleMaker()NEWLINE fleet.init(role)NEWLINENEWLINE Examples4:NEWLINENEWLINE .. 
code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE strategy = fleet.DistributedStrategy()NEWLINE fleet.init(strategy=strategy)NEWLINENEWLINE """NEWLINE if strategy is None:NEWLINE strategy = DistributedStrategy()NEWLINE self._user_defined_strategy = copy.deepcopy(strategy)NEWLINENEWLINE if role_maker is None:NEWLINE if isinstance(is_collective, bool):NEWLINE self._is_collective = is_collectiveNEWLINE self._role_maker = PaddleCloudRoleMaker(NEWLINE is_collective=self._is_collective)NEWLINE else:NEWLINE raise ValueError(NEWLINE "`is_collective` should be instance of `bool`, but got {}".NEWLINE format(type(is_collective)))NEWLINE else:NEWLINE if isinstance(role_maker, RoleMakerBase):NEWLINE self._role_maker = role_makerNEWLINE self._is_collective = role_maker._is_collectiveNEWLINE else:NEWLINE raise ValueError(NEWLINE "`role_maker` should be subclass of `RoleMakerBase`, but got {}".NEWLINE format(type(role_maker)))NEWLINE self._role_maker._generate_role()NEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.util._set_role_maker(self._role_maker)NEWLINENEWLINE self.strategy_compiler = StrategyCompiler()NEWLINENEWLINE if self._role_maker._is_non_distributed() and self._is_collective:NEWLINE if paddle.fluid.core.is_compiled_with_cuda():NEWLINE gpus_num = paddle.fluid.core.get_cuda_device_count()NEWLINE if gpus_num != 1:NEWLINE raise ValueError(NEWLINE "CUDA_VISIBLE_DEVICES shoule be set only 1 card if you use `python` to launch fleet program."NEWLINE )NEWLINENEWLINE if paddle.fluid.framework.in_dygraph_mode():NEWLINE if self.worker_num() == 1:NEWLINE # if worker_num is 1, should construct default topology & hcgNEWLINE self._topology = tp.CommunicateTopology()NEWLINE self._hcg = tp.HybridCommunicateGroup(self._topology)NEWLINE returnNEWLINE if parallel_helper._is_parallel_ctx_initialized():NEWLINE warnings.warn(NEWLINE "The dygraph parallel environment has been initialized.")NEWLINE else:NEWLINE # FLAGS_nccl_nrings is used for 
dynamic graph multi-stream communicationNEWLINE if "FLAGS_nccl_nrings" in os.environ:NEWLINE warnings.warn(NEWLINE "You have set the environment variable FLAGS_nccl_nrings "NEWLINE "outside the program, so the nccl_comm_num in "NEWLINE "DistributedStrategy will not take effect here.")NEWLINE else:NEWLINE os.environ["FLAGS_nccl_nrings"] = str(NEWLINE self._user_defined_strategy.nccl_comm_num)NEWLINE paddle.distributed.init_parallel_env()NEWLINENEWLINE # init hybrid parallel environment in dygraphNEWLINE if tp._HYBRID_PARALLEL_GROUP is None:NEWLINE self._init_hybrid_parallel_env()NEWLINE else:NEWLINE warnings.warn(NEWLINE "The dygraph hybrid parallel environment has been initialized."NEWLINE )NEWLINE elif self._is_collective:NEWLINE use_sharding = self._user_defined_strategy.shardingNEWLINENEWLINE # global groupNEWLINE global_rank = self.worker_index()NEWLINE global_world_size = self.worker_num()NEWLINE # NOTE(wangxi): see sharding_optimizerNEWLINE global_ring_id = 3 if use_sharding else 0NEWLINE global_ranks = list(range(global_world_size))NEWLINENEWLINE if tp._HYBRID_PARALLEL_GROUP is None: tp._CommunicateGroup()NEWLINE cg = tp._HYBRID_PARALLEL_GROUPNEWLINE self._hcg = cgNEWLINE cg.set_comm_group('global', global_rank, global_world_size,NEWLINE global_ring_id, global_ranks)NEWLINENEWLINE use_tensor_parallel = self._user_defined_strategy.tensor_parallelNEWLINE use_mp = use_sharding or use_tensor_parallelNEWLINENEWLINE # hybrid groupNEWLINE if use_mp is False: returnNEWLINENEWLINE mp_degree_sharding = 1NEWLINE mp_degree_tensor_parallel = 1NEWLINE if use_sharding:NEWLINE sharding_configs = self._user_defined_strategy.sharding_configsNEWLINE mp_degree_sharding = int(sharding_configs['mp_degree'])NEWLINENEWLINE if use_tensor_parallel:NEWLINE tensor_parallel_configs = self._user_defined_strategy.tensor_parallel_configsNEWLINE mp_degree_tensor_parallel = int(tensor_parallel_configs[NEWLINE 'tensor_parallel_degree'])NEWLINENEWLINE if use_sharding and 
use_tensor_parallel:NEWLINE assert mp_degree_sharding == mp_degree_tensor_parallelNEWLINENEWLINE mp_degree = mp_degree_sharding if use_sharding else mp_degree_tensor_parallelNEWLINENEWLINE if mp_degree > 1:NEWLINE assert global_world_size % mp_degree == 0NEWLINE # NOTE(wangxi): mp_ring_id sync with sharding_optimizer.py _build_groupsNEWLINE mp_ring_id = 0NEWLINE mp_rank = global_rank % mp_degreeNEWLINE mp_group_id = global_rank // mp_degreeNEWLINE mp_group_ranks = [NEWLINE idx for idx in global_ranksNEWLINE if idx // mp_degree == mp_group_idNEWLINE ]NEWLINE cg.set_comm_group('model', mp_rank, mp_degree, mp_ring_id,NEWLINE mp_group_ranks)NEWLINENEWLINE def _init_hybrid_parallel_env(self):NEWLINE """initialize the hybrid environmentNEWLINE """NEWLINE self.hybrid_configs = self._user_defined_strategy.hybrid_configsNEWLINE self.dp_degree = self.hybrid_configs["dp_degree"]NEWLINE self.mp_degree = self.hybrid_configs["mp_degree"]NEWLINE self.pp_degree = self.hybrid_configs["pp_degree"]NEWLINE self.sharding_degree = self.hybrid_configs["sharding_degree"]NEWLINENEWLINE assert self.mp_degree >= 0, "mp_degree should be greater or equal to 0"NEWLINE assert self.pp_degree >= 0, "pp_degree should be greater or equal to 0"NEWLINE assert self.sharding_degree >= 0, "sharding_degree should be greater or equal to 0"NEWLINENEWLINE self.mp_degree = max(self.mp_degree, 1)NEWLINE self.pp_degree = max(self.pp_degree, 1)NEWLINENEWLINE if self.dp_degree < 0:NEWLINE nranks = paddle.distributed.get_world_size()NEWLINE self.dp_degree = nranks // (self.mp_degree * self.pp_degree)NEWLINENEWLINE self.dp_degree = max(self.dp_degree, 1)NEWLINENEWLINE self._topology = tp.CommunicateTopology(NEWLINE hybrid_group_names=["data", "pipe", "sharding", "model"],NEWLINE dims=[NEWLINE self.dp_degree, self.pp_degree, self.sharding_degree,NEWLINE self.mp_degreeNEWLINE ])NEWLINENEWLINE self._hcg = tp.HybridCommunicateGroup(self._topology)NEWLINENEWLINE if self.mp_degree > 1:NEWLINE tensor_parallel_configs = 
self._user_defined_strategy.tensor_parallel_configsNEWLINE tensor_init_seed = tensor_parallel_configs["tensor_init_seed"]NEWLINE if tensor_init_seed == -1:NEWLINE model_parallel_random_seed()NEWLINE else:NEWLINE model_parallel_random_seed(tensor_init_seed)NEWLINENEWLINE def get_hybrid_communicate_group(self):NEWLINE assert self._hcg is not NoneNEWLINE return self._hcgNEWLINENEWLINE def get_hybrid_parallel_topology(self):NEWLINE assert self._topology is not NoneNEWLINE return self._topologyNEWLINENEWLINE def is_first_worker(self):NEWLINE """NEWLINE Check whether the node is the first instance of worker.NEWLINENEWLINE Returns:NEWLINE bool: True if this is the first node of worker,NEWLINE False if not.NEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.is_first_worker()NEWLINENEWLINE """NEWLINE return self._role_maker._is_first_worker()NEWLINENEWLINE def worker_index(self):NEWLINE """NEWLINE Get current worker index.NEWLINENEWLINE Returns:NEWLINE int: node idNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.worker_index()NEWLINENEWLINE """NEWLINE return self._role_maker._worker_index()NEWLINENEWLINE def worker_num(self):NEWLINE """NEWLINE Get current total worker number.NEWLINENEWLINE Returns:NEWLINE int: worker numbersNEWLINENEWLINE Examples:NEWLINENEWLINE .. 
code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.worker_num()NEWLINENEWLINE """NEWLINE return self._role_maker._worker_num()NEWLINENEWLINE def node_num(self):NEWLINE return self._role_maker._get_node_num()NEWLINENEWLINE def local_rank(self):NEWLINE return self._role_maker._get_local_rank()NEWLINENEWLINE def local_device_ids(self):NEWLINE return self._role_maker._get_local_device_ids()NEWLINENEWLINE def world_device_ids(self):NEWLINE return self._role_maker._get_world_device_ids()NEWLINENEWLINE def is_worker(self):NEWLINE """NEWLINE Check whether the node is an instance of worker.NEWLINENEWLINE Returns:NEWLINE bool: True if this is a node of worker,NEWLINE False if not.NEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.is_worker()NEWLINENEWLINE """NEWLINE return self._role_maker._is_worker()NEWLINENEWLINE def worker_endpoints(self, to_string=False):NEWLINE """NEWLINE Get current worker endpoints, such as ["127.0.0.1:1001", "127.0.0.1:1002"].NEWLINENEWLINE Returns:NEWLINE list/string: server endpointsNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.worker_endpoints()NEWLINENEWLINE """NEWLINE if to_string:NEWLINE return ",".join(self._role_maker._get_trainer_endpoints())NEWLINE else:NEWLINE return self._role_maker._get_trainer_endpoints()NEWLINENEWLINE def server_num(self):NEWLINE """NEWLINE Get current total worker number.NEWLINENEWLINE Returns:NEWLINE int: server numberNEWLINENEWLINE Examples:NEWLINENEWLINE .. 
code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.server_num()NEWLINE """NEWLINE return len(self._role_maker._get_pserver_endpoints())NEWLINENEWLINE def server_index(self):NEWLINE """NEWLINE Get current server index.NEWLINENEWLINE Returns:NEWLINE int: node idNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.server_index()NEWLINENEWLINE """NEWLINE return self._role_maker._server_index()NEWLINENEWLINE def server_endpoints(self, to_string=False):NEWLINE """NEWLINE Get current server endpoints, such as ["127.0.0.1:1001", "127.0.0.1:1002"].NEWLINENEWLINE Returns:NEWLINE list/string: server endpointsNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.server_endpoints()NEWLINENEWLINE """NEWLINENEWLINE if to_string:NEWLINE return ",".join(self._role_maker._get_pserver_endpoints())NEWLINE else:NEWLINE return self._role_maker._get_pserver_endpoints()NEWLINENEWLINE def is_server(self):NEWLINE """NEWLINE Check whether the node is an instance of server.NEWLINENEWLINE Returns:NEWLINE bool: True if this is a node of server,NEWLINE False if not.NEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINE fleet.is_server()NEWLINENEWLINE """NEWLINE return self._role_maker._is_server(NEWLINE ) or self._role_maker._is_heter_worker()NEWLINENEWLINE def barrier_worker(self):NEWLINE """NEWLINE barrier all workersNEWLINENEWLINE Returns:NEWLINE NoneNEWLINE """NEWLINE self._role_maker._barrier("worker")NEWLINENEWLINE @is_non_distributed_checkNEWLINE @inited_runtime_handlerNEWLINE def init_worker(self):NEWLINE """NEWLINE initialize `Communicator` for parameter server training.NEWLINENEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. 
code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINENEWLINE # build netNEWLINE # fleet.distributed_optimizer(...)NEWLINENEWLINE fleet.init_worker()NEWLINENEWLINE """NEWLINE self._runtime_handle._init_worker()NEWLINENEWLINE @is_non_distributed_checkNEWLINE @inited_runtime_handlerNEWLINE def init_server(self, *args, **kwargs):NEWLINE """NEWLINE init_server executor to initialize startup program,NEWLINE if the `args` is not empty, it will run load_persistables for increment training.NEWLINENEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINENEWLINE # build netNEWLINE # fleet.distributed_optimizer(...)NEWLINENEWLINE fleet.init_server()NEWLINENEWLINE """NEWLINE self._runtime_handle._init_server(*args, **kwargs)NEWLINENEWLINE def load_model(self, path, mode):NEWLINE """NEWLINE load fleet model from pathNEWLINENEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINENEWLINE # build netNEWLINE # fleet.distributed_optimizer(...)NEWLINENEWLINE fleet.load_model("path", "mode")NEWLINENEWLINE """NEWLINE self._runtime_handle.load_model(path, mode)NEWLINENEWLINE @is_non_distributed_checkNEWLINE @inited_runtime_handlerNEWLINE def run_server(self):NEWLINE """NEWLINE run server will run pserver main program with executor.NEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. 
code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINENEWLINE # build netNEWLINE # fleet.distributed_optimizer(...)NEWLINENEWLINE if fleet.is_server():NEWLINE fleet.init_server()NEWLINENEWLINE """NEWLINE self._runtime_handle._run_server()NEWLINENEWLINE @is_non_distributed_checkNEWLINE @inited_runtime_handlerNEWLINE def stop_worker(self):NEWLINE """NEWLINE stop `Communicator` and give training complete notice to parameter server.NEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINENEWLINE # build netNEWLINE # fleet.distributed_optimizer(...)NEWLINENEWLINE fleet.init_server()NEWLINENEWLINE """NEWLINE self._runtime_handle._stop_worker()NEWLINENEWLINE def save(self, dirname, feed=[], fetch=[], **configs):NEWLINE inference = TrueNEWLINENEWLINE if not feed and not fetch:NEWLINE inference = FalseNEWLINENEWLINE place = paddle.CPUPlace()NEWLINE executor = paddle.static.Executor(place)NEWLINENEWLINE if inference:NEWLINE feeded_var_names = []NEWLINE fetch_var_names = []NEWLINENEWLINE for var in feed:NEWLINE if isinstance(var, str):NEWLINE feeded_var_names.append(var)NEWLINE elif isinstance(var, paddle.static.Variable):NEWLINE feeded_var_names.append(var.name)NEWLINE else:NEWLINE raise ValueError("feed must be [str|Variable]")NEWLINENEWLINE for var in fetch:NEWLINE if isinstance(var, str):NEWLINE fetch_var_names.append(var)NEWLINE elif isinstance(var, paddle.static.Variable):NEWLINE fetch_var_names.append(var.name)NEWLINE else:NEWLINE raise ValueError("feed must be [str|Variable]")NEWLINENEWLINE fetch_vars = [NEWLINE paddle.static.default_main_program().global_block().var(name)NEWLINE for name in fetch_var_namesNEWLINE ]NEWLINENEWLINE self._runtime_handle._save_inference_model(NEWLINE executor, dirname, feeded_var_names, fetch_vars, None, True, 0)NEWLINE else:NEWLINE increment_mode = 0NEWLINE if "mode" in 
configs:NEWLINE increment_mode = int(configs["mode"])NEWLINE self._runtime_handle._save_persistables(NEWLINE executor, dirname, main_program=None, mode=increment_mode)NEWLINENEWLINE def save_inference_model(self,NEWLINE executor,NEWLINE dirname,NEWLINE feeded_var_names,NEWLINE target_vars,NEWLINE main_program=None,NEWLINE export_for_deployment=True,NEWLINE mode=0):NEWLINE """NEWLINE save inference model for inference.NEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init()NEWLINENEWLINE # build netNEWLINE # fleet.distributed_optimizer(...)NEWLINENEWLINE fleet.init_server()NEWLINENEWLINE """NEWLINE # warnings.warn(NEWLINE # "'save_inference_model' is a deprecated, will be deleted after v2.2.0, Please use fleet.save instead."NEWLINE # )NEWLINENEWLINE self._runtime_handle._save_inference_model(NEWLINE executor, dirname, feeded_var_names, target_vars, main_program,NEWLINE export_for_deployment, mode)NEWLINENEWLINE def save_persistables(self, executor, dirname, main_program=None, mode=0):NEWLINE """NEWLINENEWLINE saves all persistable tensors from :code:`main_program` toNEWLINE the folder :code:`dirname`. You can refer toNEWLINENEWLINE The :code:`dirname` is used to specify the folder where persistable tensorsNEWLINE are going to be saved. If you would like to save tensors in separateNEWLINE files, set :code:`filename` None.NEWLINENEWLINE Args:NEWLINE executor(Executor): The executor to run for saving persistable tensors.NEWLINE You can refer to :ref:`api_guide_executor_en` forNEWLINE more details.NEWLINENEWLINE dirname(str, optional): The saving directory path.NEWLINE When you need to save the parameter to the memory, set it to None.NEWLINE main_program(Program, optional): The program whose persistbale tensors willNEWLINE be saved. Default: None.NEWLINENEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. 
code-block:: textNEWLINENEWLINE import paddleNEWLINE paddle.enable_static()NEWLINE import paddle.distributed.fleet as fleetNEWLINENEWLINE fleet.init()NEWLINENEWLINE # build netNEWLINE # fleet.distributed_optimizer(...)NEWLINENEWLINE exe = paddle.static.Executor(paddle.CPUPlace())NEWLINE fleet.save_persistables(exe, "dirname", paddle.static.default_main_program())NEWLINENEWLINE """NEWLINE # warnings.warn(NEWLINE # "'save_persistables' is a deprecated, will be deleted after v2.2.0, Please use fleet.save instead."NEWLINE # )NEWLINENEWLINE self._runtime_handle._save_persistables(executor, dirname, main_program,NEWLINE mode)NEWLINENEWLINE def shrink(self, threshold):NEWLINE self._runtime_handle._shrink(threshold)NEWLINENEWLINE def distributed_optimizer(self, optimizer, strategy=None):NEWLINE """NEWLINE Optimizer for distributed training.NEWLINENEWLINE For the distributed training, this method would rebuild a new instance of DistributedOptimizer.NEWLINE Which has basic Optimizer function and special features for distributed training.NEWLINENEWLINE Args:NEWLINE optimizer(Optimizer): The executor to run for init server.NEWLINE strategy(DistributedStrategy): Extra properties for distributed optimizer. NEWLINE It is recommended to use DistributedStrategy in fleet.init(). The strategyNEWLINE here is for compatibility. If the strategy in fleet.distributed_optimizer() NEWLINE is not None, then it will overwrite the DistributedStrategy in fleet.init(), NEWLINE which will take effect in distributed training.NEWLINENEWLINE Returns:NEWLINE Fleet: instance of fleet.NEWLINENEWLINE Examples:NEWLINENEWLINE .. 
code-block:: pythonNEWLINENEWLINE import paddleNEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.init(is_collective=True)NEWLINE strategy = fleet.DistributedStrategy()NEWLINE optimizer = paddle.optimizer.SGD(learning_rate=0.001)NEWLINE optimizer = fleet.distributed_optimizer(optimizer, strategy=strategy)NEWLINENEWLINE """NEWLINE self.user_defined_optimizer = optimizerNEWLINENEWLINE if strategy is not None:NEWLINE if self._is_collective:NEWLINE warnings.warn(NEWLINE "It is recommended to use DistributedStrategy "NEWLINE "in fleet.init(). The strategy here is only for compatibility. "NEWLINE "If the strategy in fleet.distributed_optimizer() is "NEWLINE "not None, then it will overwrite the DistributedStrategy in fleet.init(), "NEWLINE "which will take effect in distributed training.")NEWLINE self._user_defined_strategy = copy.deepcopy(strategy)NEWLINENEWLINE self._context = {}NEWLINENEWLINE if paddle.fluid.framework.in_dygraph_mode():NEWLINE if self.worker_num() > 1:NEWLINE return HybridParallelOptimizer(optimizer, self._hcg,NEWLINE self._user_defined_strategy)NEWLINE else:NEWLINE return optimizerNEWLINE return selfNEWLINENEWLINE @dygraph_onlyNEWLINE def distributed_model(self, model):NEWLINE """NEWLINE Return distributed data parallel model (Only work in dygraph mode)NEWLINENEWLINE Args:NEWLINE model (Layer): the user-defind model which inherits Layer.NEWLINENEWLINE Returns:NEWLINE distributed data parallel model which inherits Layer.NEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddleNEWLINE import paddle.nn as nnNEWLINE from paddle.distributed import fleetNEWLINENEWLINE class LinearNet(nn.Layer):NEWLINE def __init__(self):NEWLINE super(LinearNet, self).__init__()NEWLINE self._linear1 = nn.Linear(10, 10)NEWLINE self._linear2 = nn.Linear(10, 1)NEWLINENEWLINE def forward(self, x):NEWLINE return self._linear2(self._linear1(x))NEWLINENEWLINE # 1. 
initialize fleet environmentNEWLINE fleet.init(is_collective=True)NEWLINENEWLINE # 2. create layer & optimizerNEWLINE layer = LinearNet()NEWLINE loss_fn = nn.MSELoss()NEWLINE adam = paddle.optimizer.Adam(NEWLINE learning_rate=0.001, parameters=layer.parameters())NEWLINENEWLINE # 3. get data_parallel model using fleetNEWLINE adam = fleet.distributed_optimizer(adam)NEWLINE dp_layer = fleet.distributed_model(layer)NEWLINENEWLINE # 4. run layerNEWLINE inputs = paddle.randn([10, 10], 'float32')NEWLINE outputs = dp_layer(inputs)NEWLINE labels = paddle.randn([10, 1], 'float32')NEWLINE loss = loss_fn(outputs, labels)NEWLINENEWLINE print("loss:", loss.numpy())NEWLINENEWLINE loss.backward()NEWLINENEWLINE adam.step()NEWLINE adam.clear_grad()NEWLINENEWLINENEWLINE """NEWLINE assert model is not None, "model should not be None"NEWLINE if self.worker_num() <= 1:NEWLINE return modelNEWLINENEWLINE if self._hcg.get_parallel_mode() == ParallelMode.SHARDING_PARALLEL:NEWLINE distributed_model = ShardingParallel(NEWLINE model, self._hcg, strategy=self._user_defined_strategy)NEWLINE elif self._hcg.get_parallel_mode() == ParallelMode.DATA_PARALLEL:NEWLINE distributed_model = paddle.DataParallel(NEWLINE model,NEWLINE comm_buffer_size=self._user_defined_strategy.NEWLINE fuse_grad_size_in_MB,NEWLINE last_comm_buffer_size=self._user_defined_strategy.NEWLINE last_comm_group_size_MB,NEWLINE find_unused_parameters=self._user_defined_strategy.NEWLINE find_unused_parameters)NEWLINE elif self._hcg.get_parallel_mode() == ParallelMode.TENSOR_PARALLEL:NEWLINE distributed_model = TensorParallel(NEWLINE model, self._hcg, strategy=self._user_defined_strategy)NEWLINE elif self._hcg.get_parallel_mode() == ParallelMode.PIPELINE_PARALLEL:NEWLINE distributed_model = PipelineParallel(NEWLINE model, self._hcg, strategy=self._user_defined_strategy)NEWLINENEWLINE return distributed_modelNEWLINENEWLINE @dygraph_onlyNEWLINE def state_dict(self):NEWLINE """NEWLINE Get state dict information from optimizer.NEWLINE 
(Only work in dygraph mode)NEWLINENEWLINE Returns: NEWLINE state_dict(dict) : dict contains all the Tensor used by optimizerNEWLINENEWLINE Examples:NEWLINE .. code-block:: pythonNEWLINENEWLINE import numpy as npNEWLINE import paddleNEWLINE from paddle.distributed import fleetNEWLINENEWLINE fleet.init(is_collective=True)NEWLINENEWLINE value = np.arange(26).reshape(2, 13).astype("float32")NEWLINE a = paddle.to_tensor(value)NEWLINENEWLINE layer = paddle.nn.Linear(13, 5)NEWLINE adam = paddle.optimizer.Adam(learning_rate=0.01, parameters=layer.parameters())NEWLINENEWLINE adam = fleet.distributed_optimizer(adam)NEWLINE dp_layer = fleet.distributed_model(layer)NEWLINE state_dict = adam.state_dict()NEWLINE """NEWLINE # imitate target optimizer retrievalNEWLINE return self.user_defined_optimizer.state_dict()NEWLINENEWLINE @dygraph_onlyNEWLINE def set_state_dict(self, state_dict):NEWLINE """NEWLINE Load optimizer state dict.NEWLINE (Only work in dygraph mode)NEWLINENEWLINE Args: NEWLINE state_dict(dict) : Dict contains all the Tensor needed by optimizerNEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINE .. 
code-block:: pythonNEWLINENEWLINE import numpy as npNEWLINE import paddleNEWLINE from paddle.distributed import fleetNEWLINENEWLINE fleet.init(is_collective=True)NEWLINENEWLINE value = np.arange(26).reshape(2, 13).astype("float32")NEWLINE a = paddle.to_tensor(value)NEWLINENEWLINE layer = paddle.nn.Linear(13, 5)NEWLINE adam = paddle.optimizer.Adam(learning_rate=0.01, parameters=layer.parameters())NEWLINENEWLINE adam = fleet.distributed_optimizer(adam)NEWLINE dp_layer = fleet.distributed_model(layer)NEWLINE state_dict = adam.state_dict()NEWLINE paddle.save(state_dict, "paddle_dy")NEWLINE para_state_dict = paddle.load("paddle_dy")NEWLINE adam.set_state_dict(para_state_dict)NEWLINE """NEWLINE # imitate target optimizer retrievalNEWLINE return self.user_defined_optimizer.set_state_dict(state_dict)NEWLINENEWLINE @dygraph_onlyNEWLINE def set_lr(self, value):NEWLINE """NEWLINE Set the value of the learning rate manually in the optimizer. NEWLINE (Only work in dygraph mode)NEWLINENEWLINE Args:NEWLINE value (float|Tensor): the value of learning rateNEWLINENEWLINE Returns: NEWLINE None NEWLINENEWLINE Examples:NEWLINE .. 
code-block:: pythonNEWLINENEWLINE import numpy as npNEWLINE import paddleNEWLINE from paddle.distributed import fleetNEWLINENEWLINE fleet.init(is_collective=True)NEWLINENEWLINE value = np.arange(26).reshape(2, 13).astype("float32")NEWLINE a = paddle.to_tensor(value)NEWLINENEWLINE layer = paddle.nn.Linear(13, 5)NEWLINE adam = paddle.optimizer.Adam(learning_rate=0.01, parameters=layer.parameters())NEWLINENEWLINE adam = fleet.distributed_optimizer(adam)NEWLINE dp_layer = fleet.distributed_model(layer)NEWLINENEWLINE lr_list = [0.2, 0.3, 0.4, 0.5, 0.6]NEWLINE for i in range(5):NEWLINE adam.set_lr(lr_list[i])NEWLINE lr = adam.get_lr()NEWLINE print("current lr is {}".format(lr))NEWLINE # Print:NEWLINE # current lr is 0.2NEWLINE # current lr is 0.3NEWLINE # current lr is 0.4NEWLINE # current lr is 0.5NEWLINE # current lr is 0.6NEWLINE """NEWLINE # imitate target optimizer retrievalNEWLINE return self.user_defined_optimizer.set_lr(value)NEWLINENEWLINE @dygraph_onlyNEWLINE def get_lr(self):NEWLINE """NEWLINE Get current step learning rate.NEWLINE (Only work in dygraph mode)NEWLINENEWLINE Returns:NEWLINE float: The learning rate of the current step.NEWLINENEWLINE Examples:NEWLINENEWLINE .. 
code-block:: pythonNEWLINENEWLINE import numpy as npNEWLINE import paddleNEWLINE from paddle.distributed import fleetNEWLINENEWLINE fleet.init(is_collective=True)NEWLINENEWLINE value = np.arange(26).reshape(2, 13).astype("float32")NEWLINE a = paddle.to_tensor(value)NEWLINENEWLINE layer = paddle.nn.Linear(13, 5)NEWLINE adam = paddle.optimizer.Adam(learning_rate=0.01, parameters=layer.parameters())NEWLINENEWLINE adam = fleet.distributed_optimizer(adam)NEWLINE dp_layer = fleet.distributed_model(layer)NEWLINENEWLINE lr = adam.get_lr()NEWLINE print(lr) # 0.01NEWLINE """NEWLINE # imitate target optimizer retrievalNEWLINE return self.user_defined_optimizer.get_lr()NEWLINENEWLINE @dygraph_onlyNEWLINE def step(self):NEWLINE """NEWLINE Execute the optimizer once.NEWLINE (Only work in dygraph mode)NEWLINENEWLINE Returns:NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddleNEWLINE import paddle.nn as nnNEWLINE from paddle.distributed import fleetNEWLINENEWLINE class LinearNet(nn.Layer):NEWLINE def __init__(self):NEWLINE super(LinearNet, self).__init__()NEWLINE self._linear1 = nn.Linear(10, 10)NEWLINE self._linear2 = nn.Linear(10, 1)NEWLINENEWLINE def forward(self, x):NEWLINE return self._linear2(self._linear1(x))NEWLINENEWLINE # 1. initialize fleet environmentNEWLINE fleet.init(is_collective=True)NEWLINENEWLINE # 2. create layer & optimizerNEWLINE layer = LinearNet()NEWLINE loss_fn = nn.MSELoss()NEWLINE adam = paddle.optimizer.Adam(NEWLINE learning_rate=0.001, parameters=layer.parameters())NEWLINENEWLINE # 3. get data_parallel model using fleetNEWLINE adam = fleet.distributed_optimizer(adam)NEWLINE dp_layer = fleet.distributed_model(layer)NEWLINENEWLINE # 4. 
run layerNEWLINE inputs = paddle.randn([10, 10], 'float32')NEWLINE outputs = dp_layer(inputs)NEWLINE labels = paddle.randn([10, 1], 'float32')NEWLINE loss = loss_fn(outputs, labels)NEWLINENEWLINE print("loss:", loss.numpy())NEWLINENEWLINE loss.backward()NEWLINENEWLINE adam.step()NEWLINE adam.clear_grad()NEWLINENEWLINENEWLINE """NEWLINE # imitate target optimizer retrievalNEWLINE return self.user_defined_optimizer.step()NEWLINENEWLINE @dygraph_onlyNEWLINE def clear_grad(self):NEWLINE """NEWLINE Clear the gradients of all optimized parameters for model.NEWLINE (Only work in dygraph mode)NEWLINENEWLINE Returns: NEWLINE NoneNEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddleNEWLINE import paddle.nn as nnNEWLINE from paddle.distributed import fleetNEWLINENEWLINE class LinearNet(nn.Layer):NEWLINE def __init__(self):NEWLINE super(LinearNet, self).__init__()NEWLINE self._linear1 = nn.Linear(10, 10)NEWLINE self._linear2 = nn.Linear(10, 1)NEWLINENEWLINE def forward(self, x):NEWLINE return self._linear2(self._linear1(x))NEWLINENEWLINE # 1. initialize fleet environmentNEWLINE fleet.init(is_collective=True)NEWLINENEWLINE # 2. create layer & optimizerNEWLINE layer = LinearNet()NEWLINE loss_fn = nn.MSELoss()NEWLINE adam = paddle.optimizer.Adam(NEWLINE learning_rate=0.001, parameters=layer.parameters())NEWLINENEWLINE # 3. get data_parallel model using fleetNEWLINE adam = fleet.distributed_optimizer(adam)NEWLINE dp_layer = fleet.distributed_model(layer)NEWLINENEWLINE # 4. 
run layerNEWLINE inputs = paddle.randn([10, 10], 'float32')NEWLINE outputs = dp_layer(inputs)NEWLINE labels = paddle.randn([10, 1], 'float32')NEWLINE loss = loss_fn(outputs, labels)NEWLINENEWLINE print("loss:", loss.numpy())NEWLINENEWLINE loss.backward()NEWLINENEWLINE adam.step()NEWLINE adam.clear_grad()NEWLINENEWLINE """NEWLINE # imitate target optimizer retrievalNEWLINE return self.user_defined_optimizer.clear_grad()NEWLINENEWLINE def _get_amp_optimizer(self):NEWLINE # imitate target optimizer retrievalNEWLINE amp_optimizer = NoneNEWLINE for optimizer in self.strategy_compiler._get_applied_meta_optimizer():NEWLINE if hasattr(optimizer, 'amp_init'):NEWLINE amp_optimizer = optimizerNEWLINE breakNEWLINENEWLINE if amp_optimizer is None:NEWLINE if hasattr(self.user_defined_optimizer, 'amp_init'):NEWLINE amp_optimizer = self.user_defined_optimizerNEWLINENEWLINE assert amp_optimizer is not None, \NEWLINE "amp_init can only be used when the amp(auto mixed precision) strategy is turned on."NEWLINE return amp_optimizerNEWLINENEWLINE def get_loss_scaling(self):NEWLINE """Return the real-time loss scaling factor.NEWLINE """NEWLINE amp_optimizer = self._get_amp_optimizer()NEWLINE return amp_optimizer.get_loss_scaling()NEWLINENEWLINE def amp_init(self,NEWLINE place,NEWLINE scope=None,NEWLINE test_program=None,NEWLINE use_fp16_test=False):NEWLINE """NEWLINE Init the amp training, such as cast fp32 parameters to fp16 type.NEWLINE NEWLINE Args:NEWLINE place(CUDAPlace): place is used to initialize NEWLINE fp16 parameters with fp32 values.NEWLINE scope(Scope): The scope is used to find fp32 parameters.NEWLINE test_program(Program): The program is used for testing.NEWLINE use_fp16_test(bool): Whether to use fp16 testing.NEWLINE NEWLINE Examples:NEWLINE .. 
code-block:: pythonNEWLINENEWLINE import numpy as npNEWLINE import paddleNEWLINE import paddle.nn.functional as FNEWLINE paddle.enable_static()NEWLINENEWLINE def run_example_code():NEWLINE place = paddle.CUDAPlace(0)NEWLINE exe = paddle.static.Executor(place)NEWLINE data = paddle.static.data(name='X', shape=[None, 1, 28, 28], dtype='float32')NEWLINE conv2d = paddle.static.nn.conv2d(input=data, num_filters=6, filter_size=3)NEWLINE # 1) Use fp16_guard to control the range of fp16 kernels used.NEWLINE with paddle.static.amp.fp16_guard():NEWLINE bn = paddle.static.nn.batch_norm(input=conv2d, act="relu")NEWLINE pool = F.max_pool2d(bn, kernel_size=2, stride=2)NEWLINE hidden = paddle.static.nn.fc(pool, size=10)NEWLINE loss = paddle.mean(hidden)NEWLINE # 2) Create the optimizer and set `multi_precision` to True.NEWLINE # Setting `multi_precision` to True can avoid the poor accuracyNEWLINE # or the slow convergence in a way. NEWLINE optimizer = paddle.optimizer.Momentum(learning_rate=0.01, multi_precision=True)NEWLINE # 3) These ops in `custom_black_list` will keep in the float32 computation type.NEWLINE amp_list = paddle.static.amp.CustomOpLists(NEWLINE custom_black_list=['pool2d'])NEWLINE # 4) The entry of Paddle AMP.NEWLINE # Enable pure fp16 training by setting `use_pure_fp16` to True.NEWLINE optimizer = paddle.static.amp.decorate(NEWLINE optimizer,NEWLINE amp_list,NEWLINE init_loss_scaling=128.0,NEWLINE use_dynamic_loss_scaling=True,NEWLINE use_pure_fp16=True)NEWLINE # If you don't use the default_startup_program(), you sholud passNEWLINE # your defined `startup_program` into `minimize`.NEWLINE optimizer.minimize(loss)NEWLINE exe.run(paddle.static.default_startup_program())NEWLINE # 5) Use `amp_init` after FP32 parameters initialization(such as `exe.run(startup_program)`).NEWLINE # If you want to perform the testing process, you should pass `test_program` into `amp_init`.NEWLINE optimizer.amp_init(place, scope=paddle.static.global_scope())NEWLINE NEWLINE if 
paddle.is_compiled_with_cuda() and len(paddle.static.cuda_places()) > 0:NEWLINE run_example_code() NEWLINE """NEWLINE amp_optimizer = self._get_amp_optimizer()NEWLINE return amp_optimizer.amp_init(place, scope, test_program, use_fp16_test)NEWLINENEWLINE def _final_strategy(self):NEWLINE if "valid_strategy" not in self._context:NEWLINE print(NEWLINE "WARNING: You may need to call minimize function before this function is called"NEWLINE )NEWLINE return {}NEWLINE else:NEWLINE return self._context["valid_strategy"]NEWLINENEWLINE def _get_applied_meta_list(self):NEWLINE if "applied_meta_list" not in self._context:NEWLINE print(NEWLINE "WARNING: You may need to call minimize function before _get_applied_meta_list called"NEWLINE )NEWLINE return []NEWLINE else:NEWLINE return self._context["applied_meta_list"]NEWLINENEWLINE def _get_applied_graph_list(self):NEWLINE if "applied_graph_list" not in self._context:NEWLINE print(NEWLINE "WARNING: You may need to call minimize function before _get_applied_graph_list called"NEWLINE )NEWLINE return []NEWLINE else:NEWLINE return self._context["applied_graph_list"]NEWLINENEWLINE def minimize(self,NEWLINE loss,NEWLINE startup_program=None,NEWLINE parameter_list=None,NEWLINE no_grad_set=None):NEWLINE """NEWLINE Add distributed operations to minimize ``loss`` by updating ``parameter_list``.NEWLINENEWLINE Args:NEWLINE loss (Tensor): A ``Tensor`` containing the value to minimize.NEWLINE startup_program (Program, optional): :ref:`api_fluid_Program` forNEWLINE initializing parameters in ``parameter_list``. The default valueNEWLINE is None, at this time :ref:`api_fluid_default_startup_program` will be used.NEWLINE parameter_list (Iterable, optional): Iterable of ``Tensor`` or ``Tensor.name`` to updateNEWLINE to minimize ``loss``. The default value is None, at this time all parametersNEWLINE will be updated.NEWLINE no_grad_set (set, optional): Set of ``Tensor`` or ``Tensor.name`` that don't needNEWLINE to be updated. 
The default value is None.NEWLINENEWLINE Returns:NEWLINE tuple: tuple (optimize_ops, params_grads), A list of operators appendedNEWLINE by minimize and a list of (param, grad) tensor pairs, param isNEWLINE ``Parameter``, grad is the gradient value corresponding to the parameter.NEWLINE The returned tuple can be passed to ``fetch_list`` in ``Executor.run()`` toNEWLINE indicate program pruning. If so, the program will be pruned by ``feed`` andNEWLINE ``fetch_list`` before run, see details in ``Executor``.NEWLINENEWLINE Examples:NEWLINENEWLINE .. code-block:: pythonNEWLINENEWLINE import paddleNEWLINE paddle.enable_static()NEWLINE import paddle.distributed.fleet as fleetNEWLINE import paddle.nn.functional as FNEWLINENEWLINE hid_dim = 10NEWLINE label_dim = 2NEWLINE input_x = paddle.static.data(name='x', shape=[None, 13], dtype='float32')NEWLINE input_y = paddle.static.data(name='y', shape=[None, 1], dtype='int64')NEWLINE fc_1 = paddle.static.nn.fc(x=input_x, size=hid_dim, activation='tanh')NEWLINE fc_2 = paddle.static.nn.fc(x=fc_1, size=hid_dim, activation='tanh')NEWLINE prediction = paddle.static.nn.fc(x=[fc_2], size=label_dim, activation='softmax')NEWLINE cost = F.cross_entropy(input=prediction, label=input_y)NEWLINE avg_cost = paddle.mean(x=cost)NEWLINENEWLINE fleet.init(is_collective=True)NEWLINE strategy = fleet.DistributedStrategy()NEWLINE optimizer = paddle.optimizer.SGD(learning_rate=0.001)NEWLINE optimizer = fleet.distributed_optimizer(optimizer, strategy=strategy)NEWLINE optimizer.minimize(avg_cost)NEWLINENEWLINE # for more examples, please reference https://github.com/PaddlePaddle/FleetXNEWLINENEWLINE """NEWLINE context = {}NEWLINE context["user_defined_strategy"] = copy.deepcopy(NEWLINE self._user_defined_strategy)NEWLINE if paddle.fluid.framework.in_dygraph_mode():NEWLINE # imitate target optimizer retrievalNEWLINE target_opt = self.user_defined_optimizerNEWLINE self._context = contextNEWLINE return target_opt.minimize(loss)NEWLINENEWLINE # cache original 
feed forward programNEWLINE self.origin_main_program = loss.block.programNEWLINE context["origin_main_program"] = self.origin_main_programNEWLINE context["loss"] = lossNEWLINE if startup_program == None:NEWLINE self.origin_startup_program = \NEWLINE paddle.static.default_startup_program().clone(for_test=False)NEWLINE startup_program = paddle.static.default_startup_program()NEWLINE else:NEWLINE self.origin_startup_program = \NEWLINE startup_program.clone(for_test=False)NEWLINENEWLINE context["origin_startup_program"] = startup_programNEWLINE context["role_maker"] = self._role_makerNEWLINENEWLINE # compile timeNEWLINE distributed_optimizer_list = \NEWLINE MetaOptimizerFactory()._get_valid_meta_optimizers(NEWLINE self.user_defined_optimizer)NEWLINENEWLINE context["user_defined_strategy"] = copy.deepcopy(NEWLINE self._user_defined_strategy)NEWLINE copy_user_defined_strategy = copy.deepcopy(self._user_defined_strategy)NEWLINENEWLINE # trigger the auto-parallel in very strict conditionNEWLINE # strategy = DistributedStrategy()NEWLINE # strategy.auto = TrueNEWLINE # optimizer = paddle.optimizer.SGD(learning_rate=0.1)NEWLINE # optimizer = fleet.distributed_optimizer(optimizer, strategy)NEWLINE if copy_user_defined_strategy._is_strict_auto():NEWLINE # turn on all the strategy for each optimizerNEWLINE for opt in distributed_optimizer_list:NEWLINE opt._enable_strategy(copy_user_defined_strategy, context)NEWLINENEWLINE valid_optimizer_list = []NEWLINE valid_graph_optimizer_list = []NEWLINE can_not_apply_optimizer_list = []NEWLINE # recall meta optimizers for rankingNEWLINE for opt in distributed_optimizer_list:NEWLINE opt._set_basic_info(loss, self._role_maker,NEWLINE self.user_defined_optimizer,NEWLINE copy_user_defined_strategy)NEWLINE if opt._can_apply() and not opt._is_graph_out():NEWLINE valid_optimizer_list.append(opt)NEWLINE elif opt._can_apply() and opt._is_graph_out():NEWLINE valid_graph_optimizer_list.append(opt)NEWLINE else:NEWLINE 
can_not_apply_optimizer_list.append(opt)NEWLINE # combine recalled meta optimizers to be a valid meta optimizerNEWLINE meta_optimizer, graph_optimizer = \NEWLINE self.strategy_compiler.generate_optimizer(NEWLINE loss, self._role_maker, self.user_defined_optimizer,NEWLINE copy_user_defined_strategy, valid_optimizer_list,NEWLINE valid_graph_optimizer_list)NEWLINENEWLINE valid_strategy = self.strategy_compiler._get_valid_strategy(NEWLINE copy_user_defined_strategy, can_not_apply_optimizer_list)NEWLINENEWLINE context["valid_strategy"] = copy.deepcopy(valid_strategy)NEWLINENEWLINE applied_meta_list = self.strategy_compiler._get_applied_meta_list()NEWLINE applied_graph_list = self.strategy_compiler._get_applied_graph_list()NEWLINENEWLINE context['applied_meta_list'] = applied_meta_listNEWLINE context['applied_graph_list'] = applied_graph_listNEWLINENEWLINE self._context = contextNEWLINENEWLINE self.valid_strategy = valid_strategyNEWLINE self.valid_strategy._enable_env()NEWLINENEWLINE optimize_ops = []NEWLINE params_grads = []NEWLINENEWLINE if self._role_maker._is_non_distributed() and not self._is_collective:NEWLINE if self._runtime_handle is None:NEWLINE self._runtime_handle = RuntimeFactory()._create_runtime(context)NEWLINENEWLINE compiled_program = compiler.CompiledProgram(NEWLINE self.origin_main_program).with_data_parallel(NEWLINE loss_name=loss.name, share_vars_from=None)NEWLINE loss.block.program._graph = compiled_programNEWLINE return self.user_defined_optimizer.minimize(NEWLINE loss, startup_program, parameter_list, no_grad_set=no_grad_set)NEWLINENEWLINE if meta_optimizer:NEWLINE optimize_ops, params_grads = meta_optimizer.minimize(NEWLINE loss, startup_program, parameter_list, no_grad_set=no_grad_set)NEWLINENEWLINE default_program = paddle.static.default_main_program()NEWLINENEWLINE if id(default_program) != id(loss.block.program):NEWLINE paddle.fluid.framework.switch_main_program(loss.block.program)NEWLINENEWLINE else:NEWLINE optimize_ops, params_grads = 
self.user_defined_optimizer.minimize(NEWLINE loss, startup_program, parameter_list, no_grad_set=no_grad_set)NEWLINENEWLINE context["program_optimize_ops"] = optimize_opsNEWLINE context["program_params_grads"] = params_gradsNEWLINENEWLINE if graph_optimizer:NEWLINE optimize_ops, params_grads = graph_optimizer.minimize(NEWLINE loss, startup_program, parameter_list, no_grad_set=no_grad_set)NEWLINE # since we do not encourage users to use graph operationsNEWLINE # if a graph optimizer takes effect, mostlyNEWLINE # optimizers_ops and params_grads are NoneNEWLINE # i.e. users can not modify current computation graph anymoreNEWLINE context["graph_optimize_ops"] = optimize_opsNEWLINE context["graph_optimize_grads"] = params_gradsNEWLINENEWLINE if self._runtime_handle is None:NEWLINE self._runtime_handle = RuntimeFactory()._create_runtime(context)NEWLINENEWLINE import paddle.distributed.fleet as fleetNEWLINE fleet.util._set_strategy(context["valid_strategy"])NEWLINENEWLINE return optimize_ops, params_gradsNEWLINENEWLINE @dygraph_onlyNEWLINE def distributed_scaler(self, scaler):NEWLINE return HybridParallelGradScaler(scaler, self._hcg)NEWLINE
"""
Unit and regression test for the analyze_foldamers package.
"""

# Import package, test suite, and other packages as needed
import analyze_foldamers
import pytest
import sys
import os
import pickle
from cg_openmm.cg_model.cgmodel import CGModel
from analyze_foldamers.ensembles.cluster import *

# Directory containing the trajectory/model fixtures shared by every test below.
current_path = os.path.dirname(os.path.abspath(__file__))
data_path = os.path.join(current_path, 'test_data')


def test_clustering_kmedoids_pdb(tmpdir):
    """Test KMedoids clustering of PDB replica trajectories (with a cgmodel)."""

    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel; use a context manager so the pickle file handle is closed.
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as pkl_file:
        cgmodel = pickle.load(pkl_file)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Set clustering parameters
    n_clusters = 2
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run KMedoids clustering
    medoid_positions, cluster_size, cluster_rmsd, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_KMedoids(
            pdb_file_list,
            cgmodel,
            n_clusters=n_clusters,
            frame_start=frame_start,
            frame_stride=frame_stride,
            # Pass the configured variable rather than a duplicated -1 literal.
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )

    assert len(cluster_rmsd) == n_clusters
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_1.pdb")
    assert os.path.isfile(f"{output_directory}/silhouette_kmedoids_ncluster_{n_clusters}.pdf")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")
def test_clustering_kmedoids_pdb_no_cgmodel(tmpdir):
    """Test KMedoids clustering of PDB trajectories without a cgmodel."""

    output_directory = tmpdir.mkdir("output")

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Set clustering parameters
    n_clusters = 2
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run KMedoids clustering
    medoid_positions, cluster_size, cluster_rmsd, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_KMedoids(
            pdb_file_list,
            cgmodel=None,
            n_clusters=n_clusters,
            frame_start=frame_start,
            frame_stride=frame_stride,
            # Pass the configured variable rather than a duplicated -1 literal.
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )

    assert len(cluster_rmsd) == n_clusters
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_1.pdb")
    assert os.path.isfile(f"{output_directory}/silhouette_kmedoids_ncluster_{n_clusters}.pdf")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_kmedoids_dcd(tmpdir):
    """Test KMedoids clustering of DCD trajectories."""

    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel; use a context manager so the pickle file handle is closed.
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as pkl_file:
        cgmodel = pickle.load(pkl_file)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = [f"{data_path}/replica_{i+1}.dcd" for i in range(number_replicas)]

    # Set clustering parameters
    n_clusters = 2
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run KMedoids clustering
    medoid_positions, cluster_size, cluster_rmsd, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_KMedoids(
            dcd_file_list,
            cgmodel,
            n_clusters=n_clusters,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )

    assert len(cluster_rmsd) == n_clusters
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_1.dcd")
    assert os.path.isfile(f"{output_directory}/silhouette_kmedoids_ncluster_{n_clusters}.pdf")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_pdb(tmpdir):
    """Test DBSCAN clustering of PDB trajectories."""

    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel; use a context manager so the pickle file handle is closed.
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as pkl_file:
        cgmodel = pickle.load(pkl_file)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=False,
        )

    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_pdb_core_medoids(tmpdir):
    """Test DBSCAN clustering with medoids restricted to core points."""

    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel; use a context manager so the pickle file handle is closed.
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as pkl_file:
        cgmodel = pickle.load(pkl_file)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=True,
        )

    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_pdb_no_cgmodel(tmpdir):
    """Test DBSCAN clustering without cgmodel object."""

    output_directory = tmpdir.mkdir("output")

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            pdb_file_list,
            cgmodel=None,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=False,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_dcd(tmpdir):
    """Test DBSCAN clustering of DCD trajectories."""

    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel; use a context manager so the pickle file handle is closed.
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as pkl_file:
        cgmodel = pickle.load(pkl_file)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = [f"{data_path}/replica_{i+1}.dcd" for i in range(number_replicas)]

    # Set clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run DBSCAN density-based clustering (previous comment mislabeled this as OPTICS)
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            dcd_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
            core_points_only=False,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.dcd")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_optics_pdb(tmpdir):
    """Test OPTICS clustering of PDB trajectories."""

    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel; use a context manager so the pickle file handle is closed.
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    with open(cgmodel_path, "rb") as pkl_file:
        cgmodel = pickle.load(pkl_file)

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Set clustering parameters
    min_samples = 5
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run OPTICS density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_OPTICS(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")

def test_clustering_optics_pdb_no_cgmodel(tmpdir):
    """OPTICS clustering should work without a cgmodel object.

    Topology is read directly from the PDB files when ``cgmodel=None``.
    """

    output_directory = tmpdir.mkdir("output")

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Clustering parameters
    min_samples = 5
    frame_start = 10
    frame_stride = 1
    frame_end = -1  # -1 means "use all frames after frame_start"

    # Run OPTICS density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_OPTICS(
            pdb_file_list,
            cgmodel=None,
            min_samples=min_samples,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,  # was hard-coded -1, silently ignoring the local variable
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )

    # Every retained frame must receive a cluster label.
    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_optics_dcd(tmpdir):
    """OPTICS clustering on DCD trajectories (cgmodel supplies topology)."""

    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    dcd_file_list = [f"{data_path}/replica_{i+1}.dcd" for i in range(number_replicas)]

    # Clustering parameters
    min_samples = 5
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run OPTICS density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_OPTICS(
            dcd_file_list,
            cgmodel,
            min_samples=min_samples,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,  # was hard-coded -1, silently ignoring the local variable
            output_format="dcd",
            output_dir=output_directory,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )

    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.dcd")
    assert os.path.isfile(f"{output_directory}/distances_rmsd_hist.pdf")


def test_clustering_dbscan_pdb_output_clusters(tmpdir):
    """DBSCAN clustering with per-cluster trajectory output enabled."""

    output_directory = tmpdir.mkdir("output")

    # Load in cgmodel
    cgmodel_path = os.path.join(data_path, "stored_cgmodel.pkl")
    cgmodel = pickle.load(open(cgmodel_path, "rb"))

    # Create list of trajectory files for clustering analysis
    number_replicas = 12
    pdb_file_list = [f"{data_path}/replica_{i+1}.pdb" for i in range(number_replicas)]

    # Clustering parameters
    min_samples = 3
    eps = 0.5
    frame_start = 10
    frame_stride = 1
    frame_end = -1

    # Run DBSCAN density-based clustering
    medoid_positions, cluster_sizes, cluster_rmsd, n_noise, silhouette_avg, labels, original_indices = \
        get_cluster_medoid_positions_DBSCAN(
            pdb_file_list,
            cgmodel,
            min_samples=min_samples,
            eps=eps,
            frame_start=frame_start,
            frame_stride=frame_stride,
            frame_end=frame_end,  # was hard-coded -1, silently ignoring the local variable
            output_dir=output_directory,
            output_cluster_traj=True,
            plot_silhouette=True,
            plot_rmsd_hist=True,
            filter=True,
            filter_ratio=0.20,
        )

    assert len(labels) == len(original_indices)
    assert os.path.isfile(f"{output_directory}/medoid_0.pdb")
    assert os.path.isfile(f"{output_directory}/cluster_0.pdb")
#!/usr/bin/pythonNEWLINE# -*- coding: utf-8 -*-NEWLINENEWLINE# Copyright: (c) 2019, Dag Wieers (@dagwieers) <dag@wieers.com>NEWLINE# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt)NEWLINENEWLINEfrom __future__ import absolute_import, division, print_functionNEWLINE__metaclass__ = typeNEWLINENEWLINEANSIBLE_METADATA = {'metadata_version': '1.1',NEWLINE 'status': ['preview'],NEWLINE 'supported_by': 'community'}NEWLINENEWLINEDOCUMENTATION = r'''NEWLINE---NEWLINEmodule: mso_schema_site_anpNEWLINEshort_description: Manage site-local Application Network Profiles (ANPs) in schema templateNEWLINEdescription:NEWLINE- Manage site-local ANPs in schema template on Cisco ACI Multi-Site.NEWLINEauthor:NEWLINE- Dag Wieers (@dagwieers)NEWLINEoptions:NEWLINE schema:NEWLINE description:NEWLINE - The name of the schema.NEWLINE type: strNEWLINE required: yesNEWLINE site:NEWLINE description:NEWLINE - The name of the site.NEWLINE type: strNEWLINE required: yesNEWLINE template:NEWLINE description:NEWLINE - The name of the template.NEWLINE type: strNEWLINE required: yesNEWLINE anp:NEWLINE description:NEWLINE - The name of the ANP to manage.NEWLINE type: strNEWLINE aliases: [ name ]NEWLINE state:NEWLINE description:NEWLINE - Use C(present) or C(absent) for adding or removing.NEWLINE - Use C(query) for listing an object or multiple objects.NEWLINE type: strNEWLINE choices: [ absent, present, query ]NEWLINE default: presentNEWLINEseealso:NEWLINE- module: cisco.mso.mso_schema_siteNEWLINE- module: cisco.mso.mso_schema_site_anp_epgNEWLINE- module: cisco.mso.mso_schema_template_anpNEWLINEextends_documentation_fragment: cisco.mso.modulesNEWLINE'''NEWLINENEWLINEEXAMPLES = r'''NEWLINE- name: Add a new site ANPNEWLINE cisco.mso.mso_schema_site_anp:NEWLINE host: mso_hostNEWLINE username: adminNEWLINE password: SomeSecretPasswordNEWLINE schema: Schema1NEWLINE site: Site1NEWLINE template: Template1NEWLINE anp: ANP1NEWLINE state: presentNEWLINE delegate_to: 
localhostNEWLINENEWLINE- name: Remove a site ANPNEWLINE cisco.mso.mso_schema_site_anp:NEWLINE host: mso_hostNEWLINE username: adminNEWLINE password: SomeSecretPasswordNEWLINE schema: Schema1NEWLINE site: Site1NEWLINE template: Template1NEWLINE anp: ANP1NEWLINE state: absentNEWLINE delegate_to: localhostNEWLINENEWLINE- name: Query a specific site ANPNEWLINE cisco.mso.mso_schema_site_anp:NEWLINE host: mso_hostNEWLINE username: adminNEWLINE password: SomeSecretPasswordNEWLINE schema: Schema1NEWLINE site: Site1NEWLINE template: Template1NEWLINE anp: ANP1NEWLINE state: queryNEWLINE delegate_to: localhostNEWLINE register: query_resultNEWLINENEWLINE- name: Query all site ANPsNEWLINE cisco.mso.mso_schema_site_anp:NEWLINE host: mso_hostNEWLINE username: adminNEWLINE password: SomeSecretPasswordNEWLINE schema: Schema1NEWLINE site: Site1NEWLINE template: Template1NEWLINE state: queryNEWLINE delegate_to: localhostNEWLINE register: query_resultNEWLINE'''NEWLINENEWLINERETURN = r'''NEWLINE'''NEWLINENEWLINEfrom ansible.module_utils.basic import AnsibleModuleNEWLINEfrom ansible_collections.cisco.mso.plugins.module_utils.mso import MSOModule, mso_argument_specNEWLINENEWLINENEWLINEdef main():NEWLINE argument_spec = mso_argument_spec()NEWLINE argument_spec.update(NEWLINE schema=dict(type='str', required=True),NEWLINE site=dict(type='str', required=True),NEWLINE template=dict(type='str', required=True),NEWLINE anp=dict(type='str', aliases=['name']), # This parameter is not required for querying all objectsNEWLINE state=dict(type='str', default='present', choices=['absent', 'present', 'query']),NEWLINE )NEWLINENEWLINE module = AnsibleModule(NEWLINE argument_spec=argument_spec,NEWLINE supports_check_mode=True,NEWLINE required_if=[NEWLINE ['state', 'absent', ['anp']],NEWLINE ['state', 'present', ['anp']],NEWLINE ],NEWLINE )NEWLINENEWLINE schema = module.params.get('schema')NEWLINE site = module.params.get('site')NEWLINE template = module.params.get('template')NEWLINE anp = 
module.params.get('anp')NEWLINE state = module.params.get('state')NEWLINENEWLINE mso = MSOModule(module)NEWLINENEWLINE # Get schema_idNEWLINE schema_obj = mso.get_obj('schemas', displayName=schema)NEWLINE if not schema_obj:NEWLINE mso.fail_json(msg="Provided schema '{0}' does not exist".format(schema))NEWLINENEWLINE schema_path = 'schemas/{id}'.format(**schema_obj)NEWLINE schema_id = schema_obj.get('id')NEWLINENEWLINE # Get siteNEWLINE site_id = mso.lookup_site(site)NEWLINENEWLINE # Get site_idxNEWLINE if 'sites' not in schema_obj:NEWLINE mso.fail_json(msg="No site associated with template '{0}'. Associate the site with the template using mso_schema_site.".format(template))NEWLINE sites = [(s.get('siteId'), s.get('templateName')) for s in schema_obj.get('sites')]NEWLINE if (site_id, template) not in sites:NEWLINE mso.fail_json(msg="Provided site/template '{0}-{1}' does not exist. Existing sites/templates: {2}".format(site, template, ', '.join(sites)))NEWLINENEWLINE # Schema-access uses indexesNEWLINE site_idx = sites.index((site_id, template))NEWLINE # Path-based access uses site_id-templateNEWLINE site_template = '{0}-{1}'.format(site_id, template)NEWLINENEWLINE # Get ANPNEWLINE anp_ref = mso.anp_ref(schema_id=schema_id, template=template, anp=anp)NEWLINE anps = [a.get('anpRef') for a in schema_obj.get('sites')[site_idx]['anps']]NEWLINENEWLINE if anp is not None and anp_ref in anps:NEWLINE anp_idx = anps.index(anp_ref)NEWLINE anp_path = '/sites/{0}/anps/{1}'.format(site_template, anp)NEWLINE mso.existing = schema_obj.get('sites')[site_idx]['anps'][anp_idx]NEWLINENEWLINE if state == 'query':NEWLINE if anp is None:NEWLINE mso.existing = schema_obj.get('sites')[site_idx]['anps']NEWLINE elif not mso.existing:NEWLINE mso.fail_json(msg="ANP '{anp}' not found".format(anp=anp))NEWLINE mso.exit_json()NEWLINENEWLINE anps_path = '/sites/{0}/anps'.format(site_template)NEWLINE ops = []NEWLINENEWLINE mso.previous = mso.existingNEWLINE if state == 'absent':NEWLINE if 
mso.existing:NEWLINE mso.sent = mso.existing = {}NEWLINE ops.append(dict(op='remove', path=anp_path))NEWLINENEWLINE elif state == 'present':NEWLINENEWLINE payload = dict(NEWLINE anpRef=dict(NEWLINE schemaId=schema_id,NEWLINE templateName=template,NEWLINE anpName=anp,NEWLINE ),NEWLINE )NEWLINENEWLINE mso.sanitize(payload, collate=True)NEWLINENEWLINE if not mso.existing:NEWLINE ops.append(dict(op='add', path=anps_path + '/-', value=mso.sent))NEWLINENEWLINE mso.existing = mso.proposedNEWLINENEWLINE if not module.check_mode:NEWLINE mso.request(schema_path, method='PATCH', data=ops)NEWLINENEWLINE mso.exit_json()NEWLINENEWLINENEWLINEif __name__ == "__main__":NEWLINE main()NEWLINE
"""Unit tests for the copy module."""NEWLINENEWLINEimport sysNEWLINEimport copyNEWLINEimport copy_regNEWLINENEWLINEimport unittestNEWLINEfrom test import test_supportNEWLINENEWLINEclass TestCopy(unittest.TestCase):NEWLINENEWLINE # Attempt full line coverage of copy.py from top to bottomNEWLINENEWLINE def test_exceptions(self):NEWLINE self.assert_(copy.Error is copy.error)NEWLINE self.assert_(issubclass(copy.Error, Exception))NEWLINENEWLINE # The copy() methodNEWLINENEWLINE def test_copy_basic(self):NEWLINE x = 42NEWLINE y = copy.copy(x)NEWLINE self.assertEqual(x, y)NEWLINENEWLINE def test_copy_copy(self):NEWLINE class C(object):NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __copy__(self):NEWLINE return C(self.foo)NEWLINE x = C(42)NEWLINE y = copy.copy(x)NEWLINE self.assertEqual(y.__class__, x.__class__)NEWLINE self.assertEqual(y.foo, x.foo)NEWLINENEWLINE def test_copy_registry(self):NEWLINE class C(object):NEWLINE def __new__(cls, foo):NEWLINE obj = object.__new__(cls)NEWLINE obj.foo = fooNEWLINE return objNEWLINE def pickle_C(obj):NEWLINE return (C, (obj.foo,))NEWLINE x = C(42)NEWLINE self.assertRaises(TypeError, copy.copy, x)NEWLINE copy_reg.pickle(C, pickle_C, C)NEWLINE y = copy.copy(x)NEWLINENEWLINE def test_copy_reduce_ex(self):NEWLINE class C(object):NEWLINE def __reduce_ex__(self, proto):NEWLINE return ""NEWLINE def __reduce__(self):NEWLINE raise test_support.TestFailed, "shouldn't call this"NEWLINE x = C()NEWLINE y = copy.copy(x)NEWLINE self.assert_(y is x)NEWLINENEWLINE def test_copy_reduce(self):NEWLINE class C(object):NEWLINE def __reduce__(self):NEWLINE return ""NEWLINE x = C()NEWLINE y = copy.copy(x)NEWLINE self.assert_(y is x)NEWLINENEWLINE def test_copy_cant(self):NEWLINE class C(object):NEWLINE def __getattribute__(self, name):NEWLINE if name.startswith("__reduce"):NEWLINE raise AttributeError, nameNEWLINE return object.__getattribute__(self, name)NEWLINE x = C()NEWLINE self.assertRaises(copy.Error, copy.copy, x)NEWLINENEWLINE # 
Type-specific _copy_xxx() methodsNEWLINENEWLINE def test_copy_atomic(self):NEWLINE class Classic:NEWLINE passNEWLINE class NewStyle(object):NEWLINE passNEWLINE def f():NEWLINE passNEWLINE tests = [None, 42, 2L**100, 3.14, True, False, 1j,NEWLINE "hello", u"hello\u1234", f.func_code,NEWLINE NewStyle, xrange(10), Classic, max]NEWLINE for x in tests:NEWLINE self.assert_(copy.copy(x) is x, repr(x))NEWLINENEWLINE def test_copy_list(self):NEWLINE x = [1, 2, 3]NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_copy_tuple(self):NEWLINE x = (1, 2, 3)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_copy_dict(self):NEWLINE x = {"foo": 1, "bar": 2}NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_copy_inst_vanilla(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C(42)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_copy_inst_copy(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __copy__(self):NEWLINE return C(self.foo)NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C(42)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_copy_inst_getinitargs(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __getinitargs__(self):NEWLINE return (self.foo,)NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C(42)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_copy_inst_getstate(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __getstate__(self):NEWLINE return {"foo": self.foo}NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C(42)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_copy_inst_setstate(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo 
= fooNEWLINE def __setstate__(self, state):NEWLINE self.foo = state["foo"]NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C(42)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_copy_inst_getstate_setstate(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __getstate__(self):NEWLINE return self.fooNEWLINE def __setstate__(self, state):NEWLINE self.foo = stateNEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C(42)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE # tests for copying extension types, iff module trycopy is installedNEWLINE def test_copy_classictype(self):NEWLINE print "check here\n"NEWLINE# from _testcapi import make_copyableNEWLINE# x = make_copyable([23])NEWLINE# y = copy.copy(x)NEWLINE# self.assertEqual(x, y)NEWLINE# self.assertEqual(x.tag, y.tag)NEWLINE# self.assert_(x is not y)NEWLINE# self.assert_(x.tag is y.tag)NEWLINENEWLINE def test_deepcopy_classictype(self):NEWLINE print "check here\n"NEWLINE# from _testcapi import make_copyableNEWLINE# x = make_copyable([23])NEWLINE# y = copy.deepcopy(x)NEWLINE# self.assertEqual(x, y)NEWLINE# self.assertEqual(x.tag, y.tag)NEWLINE# self.assert_(x is not y)NEWLINE# self.assert_(x.tag is not y.tag)NEWLINENEWLINE # regression tests for class-vs-instance and metaclass-confusionNEWLINE def test_copy_classoverinstance(self):NEWLINE class C(object):NEWLINE def __init__(self, v):NEWLINE self.v = vNEWLINE def __cmp__(self, other):NEWLINE return -cmp(other, self.v)NEWLINE def __copy__(self):NEWLINE return self.__class__(self.v)NEWLINE x = C(23)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINE x.__copy__ = lambda: 42NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_deepcopy_classoverinstance(self):NEWLINE class C(object):NEWLINE def __init__(self, v):NEWLINE self.v = vNEWLINE def __cmp__(self, other):NEWLINE return -cmp(other, self.v)NEWLINE def __deepcopy__(self, memo):NEWLINE 
return self.__class__(copy.deepcopy(self.v, memo))NEWLINE x = C(23)NEWLINE self.assertEqual(copy.deepcopy(x), x)NEWLINE x.__deepcopy__ = lambda memo: 42NEWLINE self.assertEqual(copy.deepcopy(x), x)NEWLINENEWLINENEWLINE def test_copy_metaclassconfusion(self):NEWLINE class MyOwnError(copy.Error):NEWLINE passNEWLINE class Meta(type):NEWLINE def __copy__(cls):NEWLINE raise MyOwnError("can't copy classes w/this metaclass")NEWLINE class C:NEWLINE __metaclass__ = MetaNEWLINE def __init__(self, tag):NEWLINE self.tag = tagNEWLINE def __cmp__(self, other):NEWLINE return -cmp(other, self.tag)NEWLINE # the metaclass can forbid shallow copying of its classesNEWLINE self.assertRaises(MyOwnError, copy.copy, C)NEWLINE # check that there is no interference with instancesNEWLINE x = C(23)NEWLINE self.assertEqual(copy.copy(x), x)NEWLINENEWLINE def test_deepcopy_metaclassconfusion(self):NEWLINE class MyOwnError(copy.Error):NEWLINE passNEWLINE class Meta(type):NEWLINE def __deepcopy__(cls, memo):NEWLINE raise MyOwnError("can't deepcopy classes w/this metaclass")NEWLINE class C:NEWLINE __metaclass__ = MetaNEWLINE def __init__(self, tag):NEWLINE self.tag = tagNEWLINE def __cmp__(self, other):NEWLINE return -cmp(other, self.tag)NEWLINE # types are ALWAYS deepcopied atomically, no matter whatNEWLINE self.assertEqual(copy.deepcopy(C), C)NEWLINE # check that there is no interference with instancesNEWLINE x = C(23)NEWLINE self.assertEqual(copy.deepcopy(x), x)NEWLINENEWLINE def _nomro(self):NEWLINE class C(type):NEWLINE def __getattribute__(self, attr):NEWLINE if attr == '__mro__':NEWLINE raise AttributeError, "What, *me*, a __mro__? 
Nevah!"NEWLINE return super(C, self).__getattribute__(attr)NEWLINE class D(object):NEWLINE __metaclass__ = CNEWLINE return D()NEWLINENEWLINE def test_copy_mro(self):NEWLINE x = self._nomro()NEWLINE y = copy.copy(x)NEWLINENEWLINE def test_deepcopy_mro(self):NEWLINE x = self._nomro()NEWLINE y = copy.deepcopy(x)NEWLINENEWLINE # The deepcopy() methodNEWLINENEWLINE def test_deepcopy_basic(self):NEWLINE x = 42NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINENEWLINE def test_deepcopy_memo(self):NEWLINE # Tests of reflexive objects are under type-specific sections below.NEWLINE # This tests only repetitions of objects.NEWLINE x = []NEWLINE x = [x, x]NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y[0] is not x[0])NEWLINE self.assert_(y[0] is y[1])NEWLINENEWLINE def test_deepcopy_issubclass(self):NEWLINE # XXX Note: there's no way to test the TypeError coming out ofNEWLINE # issubclass() -- this can only happen when an extensionNEWLINE # module defines a "type" that doesn't formally inherit fromNEWLINE # type.NEWLINE class Meta(type):NEWLINE passNEWLINE class C:NEWLINE __metaclass__ = MetaNEWLINE self.assertEqual(copy.deepcopy(C), C)NEWLINENEWLINE def test_deepcopy_deepcopy(self):NEWLINE class C(object):NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __deepcopy__(self, memo=None):NEWLINE return C(self.foo)NEWLINE x = C(42)NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y.__class__, x.__class__)NEWLINE self.assertEqual(y.foo, x.foo)NEWLINENEWLINE def test_deepcopy_registry(self):NEWLINE class C(object):NEWLINE def __new__(cls, foo):NEWLINE obj = object.__new__(cls)NEWLINE obj.foo = fooNEWLINE return objNEWLINE def pickle_C(obj):NEWLINE return (C, (obj.foo,))NEWLINE x = C(42)NEWLINE self.assertRaises(TypeError, copy.deepcopy, x)NEWLINE copy_reg.pickle(C, pickle_C, C)NEWLINE y = copy.deepcopy(x)NEWLINENEWLINE def test_deepcopy_reduce_ex(self):NEWLINE class C(object):NEWLINE 
def __reduce_ex__(self, proto):NEWLINE return ""NEWLINE def __reduce__(self):NEWLINE raise test_support.TestFailed, "shouldn't call this"NEWLINE x = C()NEWLINE y = copy.deepcopy(x)NEWLINE self.assert_(y is x)NEWLINENEWLINE def test_deepcopy_reduce(self):NEWLINE class C(object):NEWLINE def __reduce__(self):NEWLINE return ""NEWLINE x = C()NEWLINE y = copy.deepcopy(x)NEWLINE self.assert_(y is x)NEWLINENEWLINE def test_deepcopy_cant(self):NEWLINE class C(object):NEWLINE def __getattribute__(self, name):NEWLINE if name.startswith("__reduce"):NEWLINE raise AttributeError, nameNEWLINE return object.__getattribute__(self, name)NEWLINE x = C()NEWLINE self.assertRaises(copy.Error, copy.deepcopy, x)NEWLINENEWLINE # Type-specific _deepcopy_xxx() methodsNEWLINENEWLINE def test_deepcopy_atomic(self):NEWLINE class Classic:NEWLINE passNEWLINE class NewStyle(object):NEWLINE passNEWLINE def f():NEWLINE passNEWLINE tests = [None, 42, 2L**100, 3.14, True, False, 1j,NEWLINE "hello", u"hello\u1234", f.func_code,NEWLINE NewStyle, xrange(10), Classic, max]NEWLINE for x in tests:NEWLINE self.assert_(copy.deepcopy(x) is x, repr(x))NEWLINENEWLINE def test_deepcopy_list(self):NEWLINE x = [[1, 2], 3]NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(x is not y)NEWLINE self.assert_(x[0] is not y[0])NEWLINENEWLINE def test_deepcopy_reflexive_list(self):NEWLINE x = []NEWLINE x.append(x)NEWLINE y = copy.deepcopy(x)NEWLINE self.assertRaises(RuntimeError, cmp, y, x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y[0] is y)NEWLINE self.assertEqual(len(y), 1)NEWLINENEWLINE def test_deepcopy_tuple(self):NEWLINE x = ([1, 2], 3)NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(x is not y)NEWLINE self.assert_(x[0] is not y[0])NEWLINENEWLINE def test_deepcopy_reflexive_tuple(self):NEWLINE x = ([],)NEWLINE x[0].append(x)NEWLINE y = copy.deepcopy(x)NEWLINE self.assertRaises(RuntimeError, cmp, y, x)NEWLINE self.assert_(y is not x)NEWLINE 
self.assert_(y[0] is not x[0])NEWLINE self.assert_(y[0][0] is y)NEWLINENEWLINE def test_deepcopy_dict(self):NEWLINE x = {"foo": [1, 2], "bar": 3}NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(x is not y)NEWLINE self.assert_(x["foo"] is not y["foo"])NEWLINENEWLINE def test_deepcopy_reflexive_dict(self):NEWLINE x = {}NEWLINE x['foo'] = xNEWLINE y = copy.deepcopy(x)NEWLINE self.assertRaises(RuntimeError, cmp, y, x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y['foo'] is y)NEWLINE self.assertEqual(len(y), 1)NEWLINENEWLINE def test_deepcopy_keepalive(self):NEWLINE memo = {}NEWLINE x = 42NEWLINE y = copy.deepcopy(x, memo)NEWLINE self.assert_(memo[id(x)] is x)NEWLINENEWLINE def test_deepcopy_inst_vanilla(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C([42])NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y.foo is not x.foo)NEWLINENEWLINE def test_deepcopy_inst_deepcopy(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __deepcopy__(self, memo):NEWLINE return C(copy.deepcopy(self.foo, memo))NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C([42])NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y.foo is not x.foo)NEWLINENEWLINE def test_deepcopy_inst_getinitargs(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __getinitargs__(self):NEWLINE return (self.foo,)NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C([42])NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y.foo is not x.foo)NEWLINENEWLINE def test_deepcopy_inst_getstate(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def 
__getstate__(self):NEWLINE return {"foo": self.foo}NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C([42])NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y.foo is not x.foo)NEWLINENEWLINE def test_deepcopy_inst_setstate(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __setstate__(self, state):NEWLINE self.foo = state["foo"]NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C([42])NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y.foo is not x.foo)NEWLINENEWLINE def test_deepcopy_inst_getstate_setstate(self):NEWLINE class C:NEWLINE def __init__(self, foo):NEWLINE self.foo = fooNEWLINE def __getstate__(self):NEWLINE return self.fooNEWLINE def __setstate__(self, state):NEWLINE self.foo = stateNEWLINE def __cmp__(self, other):NEWLINE return cmp(self.foo, other.foo)NEWLINE x = C([42])NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y.foo is not x.foo)NEWLINENEWLINE def test_deepcopy_reflexive_inst(self):NEWLINE class C:NEWLINE passNEWLINE x = C()NEWLINE x.foo = xNEWLINE y = copy.deepcopy(x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y.foo is y)NEWLINENEWLINE # _reconstruct()NEWLINENEWLINE def test_reconstruct_string(self):NEWLINE class C(object):NEWLINE def __reduce__(self):NEWLINE return ""NEWLINE x = C()NEWLINE y = copy.copy(x)NEWLINE self.assert_(y is x)NEWLINE y = copy.deepcopy(x)NEWLINE self.assert_(y is x)NEWLINENEWLINE def test_reconstruct_nostate(self):NEWLINE class C(object):NEWLINE def __reduce__(self):NEWLINE return (C, ())NEWLINE x = C()NEWLINE x.foo = 42NEWLINE y = copy.copy(x)NEWLINE self.assert_(y.__class__ is x.__class__)NEWLINE y = copy.deepcopy(x)NEWLINE self.assert_(y.__class__ is x.__class__)NEWLINENEWLINE def test_reconstruct_state(self):NEWLINE 
class C(object):NEWLINE def __reduce__(self):NEWLINE return (C, (), self.__dict__)NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.__dict__, other.__dict__)NEWLINE x = C()NEWLINE x.foo = [42]NEWLINE y = copy.copy(x)NEWLINE self.assertEqual(y, x)NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y.foo is not x.foo)NEWLINENEWLINE def test_reconstruct_state_setstate(self):NEWLINE class C(object):NEWLINE def __reduce__(self):NEWLINE return (C, (), self.__dict__)NEWLINE def __setstate__(self, state):NEWLINE self.__dict__.update(state)NEWLINE def __cmp__(self, other):NEWLINE return cmp(self.__dict__, other.__dict__)NEWLINE x = C()NEWLINE x.foo = [42]NEWLINE y = copy.copy(x)NEWLINE self.assertEqual(y, x)NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(y, x)NEWLINE self.assert_(y.foo is not x.foo)NEWLINENEWLINE def test_reconstruct_reflexive(self):NEWLINE class C(object):NEWLINE passNEWLINE x = C()NEWLINE x.foo = xNEWLINE y = copy.deepcopy(x)NEWLINE self.assert_(y is not x)NEWLINE self.assert_(y.foo is y)NEWLINENEWLINE # Additions for Python 2.3 and pickle protocol 2NEWLINENEWLINE def test_reduce_4tuple(self):NEWLINE class C(list):NEWLINE def __reduce__(self):NEWLINE return (C, (), self.__dict__, iter(self))NEWLINE def __cmp__(self, other):NEWLINE return (cmp(list(self), list(other)) orNEWLINE cmp(self.__dict__, other.__dict__))NEWLINE x = C([[1, 2], 3])NEWLINE y = copy.copy(x)NEWLINE self.assertEqual(x, y)NEWLINE self.assert_(x is not y)NEWLINE self.assert_(x[0] is y[0])NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(x, y)NEWLINE self.assert_(x is not y)NEWLINE self.assert_(x[0] is not y[0])NEWLINENEWLINE def test_reduce_5tuple(self):NEWLINE class C(dict):NEWLINE def __reduce__(self):NEWLINE return (C, (), self.__dict__, None, self.iteritems())NEWLINE def __cmp__(self, other):NEWLINE return (cmp(dict(self), list(dict)) orNEWLINE cmp(self.__dict__, other.__dict__))NEWLINE x = C([("foo", [1, 2]), ("bar", 3)])NEWLINE y = 
copy.copy(x)NEWLINE self.assertEqual(x, y)NEWLINE self.assert_(x is not y)NEWLINE self.assert_(x["foo"] is y["foo"])NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(x, y)NEWLINE self.assert_(x is not y)NEWLINE self.assert_(x["foo"] is not y["foo"])NEWLINENEWLINE def test_copy_slots(self):NEWLINE class C(object):NEWLINE __slots__ = ["foo"]NEWLINE x = C()NEWLINE x.foo = [42]NEWLINE y = copy.copy(x)NEWLINE self.assert_(x.foo is y.foo)NEWLINENEWLINE def test_deepcopy_slots(self):NEWLINE class C(object):NEWLINE __slots__ = ["foo"]NEWLINE x = C()NEWLINE x.foo = [42]NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(x.foo, y.foo)NEWLINE self.assert_(x.foo is not y.foo)NEWLINENEWLINE def test_copy_list_subclass(self):NEWLINE class C(list):NEWLINE passNEWLINE x = C([[1, 2], 3])NEWLINE x.foo = [4, 5]NEWLINE y = copy.copy(x)NEWLINE self.assertEqual(list(x), list(y))NEWLINE self.assertEqual(x.foo, y.foo)NEWLINE self.assert_(x[0] is y[0])NEWLINE self.assert_(x.foo is y.foo)NEWLINENEWLINE def test_deepcopy_list_subclass(self):NEWLINE class C(list):NEWLINE passNEWLINE x = C([[1, 2], 3])NEWLINE x.foo = [4, 5]NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(list(x), list(y))NEWLINE self.assertEqual(x.foo, y.foo)NEWLINE self.assert_(x[0] is not y[0])NEWLINE self.assert_(x.foo is not y.foo)NEWLINENEWLINE def test_copy_tuple_subclass(self):NEWLINE class C(tuple):NEWLINE passNEWLINE x = C([1, 2, 3])NEWLINE self.assertEqual(tuple(x), (1, 2, 3))NEWLINE y = copy.copy(x)NEWLINE self.assertEqual(tuple(y), (1, 2, 3))NEWLINENEWLINE def test_deepcopy_tuple_subclass(self):NEWLINE class C(tuple):NEWLINE passNEWLINE x = C([[1, 2], 3])NEWLINE self.assertEqual(tuple(x), ([1, 2], 3))NEWLINE y = copy.deepcopy(x)NEWLINE self.assertEqual(tuple(y), ([1, 2], 3))NEWLINE self.assert_(x is not y)NEWLINE self.assert_(x[0] is not y[0])NEWLINENEWLINE def test_getstate_exc(self):NEWLINE class EvilState(object):NEWLINE def __getstate__(self):NEWLINE raise ValueError, "ain't got no stickin' 
state"NEWLINE self.assertRaises(ValueError, copy.copy, EvilState())NEWLINENEWLINEdef test_main():NEWLINE test_support.run_unittest(TestCopy)NEWLINENEWLINEif __name__ == "__main__":NEWLINE test_main()NEWLINE
#!/usr/bin/env/ pythonNEWLINEprint ("Hola mundo")NEWLINENEWLINE# TIPOS DE DATOSNEWLINENEWLINE# Esto e unha cadeaNEWLINEc = "Hola mundo"NEWLINENEWLINE# Esto e un enteiroNEWLINEe = 23NEWLINENEWLINE# Esto e un longNEWLINElong = 23NEWLINENEWLINE# Numero en octalNEWLINEoctal = 0o27NEWLINENEWLINE# Numero en HexadecimalNEWLINEhexDecimal = 0x3452334NEWLINENEWLINE# Numero con decimalesNEWLINEreal = 23.334223NEWLINENEWLINE# Numero con decimales en notacion cientificaNEWLINEcientifico = 0.1e-3NEWLINENEWLINE# Podese comprobar coa funcion typeNEWLINEprint(type(c))NEWLINEprint(type(e))NEWLINEprint(type(long))NEWLINEprint(octal)NEWLINEprint(hexDecimal)NEWLINENEWLINENEWLINE
# Generated by Django 3.2 on 2021-06-24 12:18NEWLINENEWLINEfrom django.db import migrations, modelsNEWLINENEWLINENEWLINEclass Migration(migrations.Migration):NEWLINENEWLINE dependencies = [NEWLINE ('blog_email', '0001_initial'),NEWLINE ]NEWLINENEWLINE operations = [NEWLINE migrations.AlterField(NEWLINE model_name='email',NEWLINE name='id',NEWLINE field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),NEWLINE ),NEWLINE ]NEWLINE
from config import BLACK, WHITE, EMPTY
from random import randint  # NOTE(review): unused in the visible code -- kept in case other parts of the file use it


class Evaluator(object):
    """Static board evaluator for an 8x8 Othello/Reversi board.

    A positional weight matrix is selected at construction time according
    to the difficulty ``level`` (1 = worst, 2 = medium, 3 = best).
    """

    WORST_WEIGHT_MATRIX = [
        [ 1, 1, 1, 1, 1, 1, 1, 1],
        [ 1, 1, 1, 1, 1, 1, 1, 1],
        [ 1, 1, 1, 1, 1, 1, 1, 1],
        [ 1, 1, 1, 1, 1, 1, 1, 1],
        [ 1, 1, 1, 1, 1, 1, 1, 1],
        [ 1, 1, 1, 1, 1, 1, 1, 1],
        [ 1, 1, 1, 1, 1, 1, 1, 1],
        [ 1, 1, 1, 1, 1, 1, 1, 1],
    ]

    MEDIUM_WEIGHT_MATRIX = [
        [ 4,-3, 2, 2, 2, 2,-3, 4],
        [-3,-4,-1,-1,-1,-1,-4,-3],
        [ 2,-1, 1, 0, 0, 1,-1, 2],
        [ 2,-1, 0, 1, 1, 0,-1, 2],
        [ 2,-1, 0, 1, 1, 0,-1, 2],
        [ 2,-1, 1, 0, 0, 1,-1, 2],
        [-3,-4,-1,-1,-1,-1,-4,-3],
        [ 4,-3, 2, 2, 2, 2,-3, 4],
    ]

    BEST_WEIGHT_MATRIX = [
        [64,-8, 8, 8, 8, 8,-8,64],
        [-8,-8,-2,-2,-2,-2,-8,-8],
        [ 8,-2, 1, 1, 1, 1,-2, 8],
        [ 8,-2, 1, 1, 1, 1,-2, 8],
        [ 8,-2, 1, 1, 1, 1,-2, 8],
        [ 8,-2, 1, 1, 1, 1,-2, 8],
        [-8,-8,-2,-2,-2,-2,-8,-8],
        [64,-8, 8, 8, 8, 8,-8,64],
    ]

    def __init__(self, level):
        """Select the weight matrix for the given difficulty level (1-3).

        Unknown levels deliberately leave ``WEIGHT_MATRIX`` unset, matching
        the original behavior (an AttributeError surfaces on first score()).
        """
        super().__init__()
        self.level = level
        matrices = {
            1: self.WORST_WEIGHT_MATRIX,
            2: self.MEDIUM_WEIGHT_MATRIX,
            3: self.BEST_WEIGHT_MATRIX,
        }
        if level in matrices:
            self.WEIGHT_MATRIX = matrices[level]

    def score(self, board, currentDepth, player, opponent):
        """Return the positional score of ``board`` from ``player``'s view.

        Returns -1000 when the player has been wiped out, +1000 when the
        opponent has been; otherwise the sum of the weight-matrix cells
        occupied by the player minus those occupied by the opponent.
        (``currentDepth`` is accepted for interface compatibility.)
        """
        whites, blacks, empty = board.count_stones()

        # A wipe-out ends the game: losing every stone is the worst outcome,
        # removing all of the opponent's stones the best.
        if (player == WHITE and whites == 0) or (player == BLACK and blacks == 0):
            return -1000
        if (opponent == WHITE and whites == 0) or (opponent == BLACK and blacks == 0):
            return 1000

        # Add the weight of every cell held by the player, subtract the
        # opponent's. A cell holds at most one color, so elif is equivalent
        # to the original pair of independent ifs but does one lookup.
        score = 0
        for i in range(8):
            for j in range(8):
                cell = board.board[i][j]
                if cell == player:
                    score += self.WEIGHT_MATRIX[i][j]
                elif cell == opponent:
                    score -= self.WEIGHT_MATRIX[i][j]
        return score
from holoviews.element.comparison import ComparisonTestCaseNEWLINEfrom pyviz_comms import Comm, JupyterCommNEWLINENEWLINENEWLINEclass TestComm(ComparisonTestCase):NEWLINENEWLINE def test_init_comm(self):NEWLINE Comm()NEWLINENEWLINE def test_init_comm_id(self):NEWLINE comm = Comm(id='Test')NEWLINE self.assertEqual(comm.id, 'Test')NEWLINENEWLINE def test_decode(self):NEWLINE msg = 'Test'NEWLINE self.assertEqual(Comm.decode(msg), msg)NEWLINENEWLINE def test_handle_message_error_reply(self):NEWLINE def raise_error(msg=None, metadata=None):NEWLINE raise Exception('Test')NEWLINE def assert_error(msg=None, metadata=None):NEWLINE self.assertEqual(metadata['msg_type'], "Error")NEWLINE self.assertTrue(metadata['traceback'].endswith('Exception: Test'))NEWLINE comm = Comm(id='Test', on_msg=raise_error)NEWLINE comm.send = assert_errorNEWLINE comm._handle_msg({})NEWLINENEWLINE def test_handle_message_ready_reply(self):NEWLINE def assert_ready(msg=None, metadata=None):NEWLINE self.assertEqual(metadata, {'msg_type': "Ready", 'content': ''})NEWLINE comm = Comm(id='Test')NEWLINE comm.send = assert_readyNEWLINE comm._handle_msg({})NEWLINENEWLINE def test_handle_message_ready_reply_with_comm_id(self):NEWLINE def assert_ready(msg=None, metadata=None):NEWLINE self.assertEqual(metadata, {'msg_type': "Ready", 'content': '',NEWLINE 'comm_id': 'Testing id'})NEWLINE comm = Comm(id='Test')NEWLINE comm.send = assert_readyNEWLINE comm._handle_msg({'comm_id': 'Testing id'})NEWLINENEWLINENEWLINEclass TestJupyterComm(ComparisonTestCase):NEWLINENEWLINE def test_init_comm(self):NEWLINE JupyterComm()NEWLINENEWLINE def test_init_comm_id(self):NEWLINE comm = JupyterComm(id='Test')NEWLINE self.assertEqual(comm.id, 'Test')NEWLINENEWLINE def test_decode(self):NEWLINE msg = {'content': {'data': 'Test'}}NEWLINE decoded = JupyterComm.decode(msg)NEWLINE self.assertEqual(decoded, 'Test')NEWLINENEWLINE def test_on_msg(self):NEWLINE def raise_error(msg):NEWLINE if msg == 'Error':NEWLINE raise Exception()NEWLINE 
comm = JupyterComm(id='Test', on_msg=raise_error)NEWLINE with self.assertRaises(Exception):NEWLINE comm._handle_msg({'content': {'data': 'Error'}})NEWLINE
_base_ = [NEWLINE 'r50_sz224_4xb64_head1_lr0_1_step_ep20.py',NEWLINE]NEWLINENEWLINE# optimizerNEWLINEoptimizer = dict(paramwise_options={'\\Ahead.': dict(lr_mult=100)})NEWLINE
from aistore.client import Client, BckNEWLINENEWLINEclient = Client("http://localhost:31337")NEWLINENEWLINE# Initialize transformNEWLINEwith open('md5_pod.yaml', 'r') as f:NEWLINE spec = f.read()NEWLINE transform_id = client.etl_init(spec=spec)NEWLINENEWLINE# Transform objectsNEWLINEfor i in range(0, 10):NEWLINE object_name = "shard-{}.tar".format(i)NEWLINE output = client.transform_object(NEWLINE transform_id=transform_id,NEWLINE object_name=object_name,NEWLINE bck=Bck("shards"),NEWLINE )NEWLINE print(f"{object_name} -> {output}")NEWLINE
from kivy.app import AppNEWLINEfrom kivy.uix.boxlayout import BoxLayoutNEWLINEfrom kivy.uix.gridlayout import GridLayoutNEWLINEfrom kivy.uix.button import ButtonNEWLINEfrom kivy.uix.label import LabelNEWLINEfrom kivy.uix.textinput import TextInputNEWLINEfrom kivy.uix.widget import WidgetNEWLINENEWLINEfrom ExerciseRunning import *NEWLINENEWLINEfrom kivy.graphics import *NEWLINEfrom kivy.core.text import Label as CoreLabelNEWLINE#https://groups.google.com/forum/#!topic/kivy-users/zRCjfhBcX4cNEWLINENEWLINEclass ExerciseRunningStatisticsWidget( GridLayout ):NEWLINE def __init__( self, exercise_sets_reps_weights_name,NEWLINE journal, **kwargs ):NEWLINE super( ExerciseRunningStatisticsWidget,NEWLINE self ).__init__( **kwargs )NEWLINE self.cols = 1NEWLINE self.spacing = 1NEWLINE self.exercise_name = exercise_sets_reps_weights_nameNEWLINE excercise_label = Label(NEWLINE text = "(ExerciseRunning, default plot type)",NEWLINE size_hint_y = 0.1 )NEWLINE self.add_widget( excercise_label )NEWLINE self.drawing_widget = Widget()NEWLINE self.add_widget( self.drawing_widget )NEWLINE self.drawing_widget.bind( size = self.update_drawing )NEWLINE NEWLINE def update_drawing(self, *args):NEWLINE self.drawing_widget.canvas.clear()NEWLINE self.drawing_widget.canvas.add( Color( 1, 1, 1) )NEWLINE self.drawing_widget.bg_rect = Rectangle(NEWLINE pos = (0, 0),NEWLINE size = ( self.drawing_widget.width,NEWLINE self.drawing_widget.height ) )NEWLINE self.drawing_widget.canvas.add( self.drawing_widget.bg_rect )NEWLINE journal = App.get_running_app().journalNEWLINE drawing_instructions = \NEWLINE ExerciseRunningStatisticsWidget.gen_drawing_instructions(NEWLINE self.exercise_name, journal,NEWLINE self.drawing_widget.bg_rect.size )NEWLINE self.drawing_widget.canvas.add( drawing_instructions )NEWLINE NEWLINE @classmethodNEWLINE def gen_drawing_instructions( cls, ex_name, journal, rect_size ):NEWLINE plot = InstructionGroup()NEWLINE # testNEWLINE plot.add( Color( 1, 1, 0) )NEWLINE plot.add( Rectangle( 
pos = (0, 0), size = ( 100, 100 ) ) )NEWLINE #NEWLINE axes_offsets, axes_sizes, axes_instr = \NEWLINE cls.axes_instructions( rect_size )NEWLINE plot.add( axes_instr )NEWLINE dates_exces = cls.get_dates_exercises_pairs( ex_name, journal )NEWLINE ticks_instr = cls.ticks_instructions( axes_offsets,NEWLINE axes_sizes,NEWLINE dates_exces )NEWLINE plot.add( ticks_instr )NEWLINE plot.add( cls.plot_dists_times( dates_exces,NEWLINE axes_offsets,NEWLINE axes_sizes ) )NEWLINE return plotNEWLINENEWLINE @classmethodNEWLINE def axes_instructions( cls, rect_size ):NEWLINE axes = InstructionGroup()NEWLINE offset_x = 0.05 * rect_size[0]NEWLINE offset_y = 0.1 * rect_size[1]NEWLINE line_width = 2NEWLINE axes.add( Color( 0, 0, 0) )NEWLINE axes.add( Line(NEWLINE points = [ offset_x, offset_y,NEWLINE offset_x, rect_size[1] - offset_y ],NEWLINE width = line_width ) )NEWLINE axes.add( Line(NEWLINE points = [ offset_x, offset_y,NEWLINE rect_size[0] - offset_x, offset_y ],NEWLINE width = line_width ) )NEWLINE return ( ( offset_x, offset_y ),NEWLINE ( rect_size[0] - 2 * offset_x,NEWLINE rect_size[1] - 2 * offset_y ),NEWLINE axes )NEWLINENEWLINE @classmethodNEWLINE def ticks_instructions( cls, axes_offsets, axes_sizes, dates_exces ):NEWLINE x_ticks = InstructionGroup()NEWLINE ticks_len = 5NEWLINE if len( dates_exces ) != 0:NEWLINE x_ticks_distance = axes_sizes[0] / len( dates_exces )NEWLINE else:NEWLINE x_ticks_distance = 0NEWLINE yyyy_mm_dd = [ x.split(' ')[0] for (x, y) in dates_exces ]NEWLINE for i, d in enumerate( yyyy_mm_dd ):NEWLINE x_ticks.add(NEWLINE Line( points =NEWLINE [ axes_offsets[0] + ( i + 1 ) * x_ticks_distance,NEWLINE axes_offsets[1],NEWLINE axes_offsets[0] + ( i + 1 ) * x_ticks_distance,NEWLINE axes_offsets[1] - ticks_len ],NEWLINE width = 3 ) )NEWLINE text_label = CoreLabel( text=d, font_size = 15 )NEWLINE text_label.refresh()NEWLINE texture = text_label.textureNEWLINE texture_size = list( texture.size )NEWLINE x_ticks.add( Rectangle(NEWLINE texture = texture,NEWLINE size 
= texture_size,NEWLINE pos = (NEWLINE axes_offsets[0] + ( i + 1 ) * x_ticks_distance - 45,NEWLINE axes_offsets[1] - ticks_len - 25 )))NEWLINE return x_ticksNEWLINE NEWLINE NEWLINE @classmethodNEWLINE def get_dates_exercises_pairs( cls, ex_name, journal ):NEWLINE dates_exces = []NEWLINE # move to Journal class?NEWLINE for tr in journal.trainings:NEWLINE for ex in tr.exercises:NEWLINE if ex_name == ex.description.get( 'name' ):NEWLINE dates_exces.append( NEWLINE ( tr.description.get( "start_time" ), ex ) )NEWLINE return dates_excesNEWLINE NEWLINENEWLINE @classmethodNEWLINE def plot_dists_times( cls, dates_exces, axes_offset, axes_size ):NEWLINE dists_times_instr = InstructionGroup()NEWLINE if len( dates_exces ) != 0:NEWLINE distance_between_centers = axes_size[0] / len( dates_exces )NEWLINE else:NEWLINE distance_between_centers = 0NEWLINE max_total = 0NEWLINE for d, ex in dates_exces:NEWLINE # move to sep functionNEWLINE ex_total = 0NEWLINE dists = ex.description.get("distances")NEWLINE for dist in dists:NEWLINE try:NEWLINE ex_total = ex_total + float(dist)NEWLINE except ValueError:NEWLINE ex_total = ex_total + 0NEWLINE if ex_total > max_total:NEWLINE max_total = ex_totalNEWLINE if max_total != 0:NEWLINE y_distance = axes_size[1] / ( max_total + 1 )NEWLINE else:NEWLINE y_distance = 0NEWLINE for i, (d, ex) in enumerate( dates_exces ):NEWLINE distances = ex.description.get("distances")NEWLINE times = ex.description.get("times")NEWLINE float_dists = []NEWLINE for dist in distances:NEWLINE try:NEWLINE float_dists.append( float(dist) )NEWLINE except ValueError:NEWLINE float_dists.append( 0 )NEWLINE for f_d, d in enumerate( float_dists ):NEWLINE y_pos_top = axes_offset[1] + \NEWLINE sum( float_dists[0:f_d+1] ) * y_distanceNEWLINE y_pos_bottom = axes_offset[1] + \NEWLINE sum( float_dists[0:f_d] ) * y_distanceNEWLINE x_center_pos = \NEWLINE axes_offset[0] + distance_between_centers * (i + 1)NEWLINE x_size = 10NEWLINE y_size = y_pos_top - y_pos_bottomNEWLINE 
dists_times_instr.add(NEWLINE Line( points = [ x_center_pos - 5, y_pos_top,NEWLINE x_center_pos + 5, y_pos_top ],NEWLINE width = 3 ) )NEWLINE text_label = CoreLabel( text = str(d), font_size = 15 )NEWLINE text_label.refresh()NEWLINE texture = text_label.textureNEWLINE texture_size = list( texture.size )NEWLINE dists_times_instr.add( Rectangle(NEWLINE texture = texture,NEWLINE size = texture_size,NEWLINE pos = (NEWLINE x_center_pos - 10,NEWLINE y_pos_bottom + (y_pos_top - y_pos_bottom) / 2 )))NEWLINE return dists_times_instrNEWLINE
import discordNEWLINEimport yamlNEWLINEfrom discord.ext import tasks, commandsNEWLINENEWLINEfrom discord_slash import SlashCommandNEWLINEfrom discord_slash.utils.manage_commands import create_optionNEWLINENEWLINEfrom util.inspect import process_url, process_siteNEWLINENEWLINEwith open("conf.yaml", "r") as c: cfg = yaml.safe_load(c) NEWLINEbot = commands.Bot(command_prefix="::")NEWLINEslash = SlashCommand(bot, sync_commands=True)NEWLINEsites = cfg["sites"]NEWLINENEWLINE# Slash CommandNEWLINE@slash.slash(name="inlay",NEWLINE description="Embed or get a direct link for a video",NEWLINE options=[create_option( name="url", description="URL to page with a video\NEWLINE to embed", option_type=3, required=True )])NEWLINEasync def inlay(ctx, url: str):NEWLINE embed = NoneNEWLINE await ctx.send(content=f"Processing: {url}")NEWLINE async with ctx.channel.typing():NEWLINE site, url = process_site(url, sites)NEWLINE if url:NEWLINE embed = process_url(url, site, direct=True)NEWLINE if embed:NEWLINE await ctx.channel.send(content=embed)NEWLINE else:NEWLINE await ctx.channel.send(content="Could not find or embed video 😥")NEWLINENEWLINE# Automatic (On Matching URL)NEWLINEif cfg["automatic"]:NEWLINE @bot.eventNEWLINE async def on_message(ctx):NEWLINE if not ctx.author == bot.user:NEWLINE embed = NoneNEWLINE site, url, spoiler = process_site(ctx.content, sites)NEWLINE if site and url:NEWLINE async with ctx.channel.typing():NEWLINE embed = process_url(url, site)NEWLINE if embed: NEWLINE # embed = f"Sent by *{ctx.author.nick}*\n" + embedNEWLINE if spoiler:NEWLINE embed = f"||{embed} ||"NEWLINE if cfg["reply"]: NEWLINE await ctx.reply(embed, mention_author=cfg["mention"])NEWLINE else: NEWLINE await ctx.channel.send(content=embed)NEWLINENEWLINE if cfg["delete"]:NEWLINE await ctx.delete()NEWLINENEWLINE@bot.eventNEWLINEasync def on_ready():NEWLINE print(f"Logged in as {bot.user.name}")NEWLINE await bot.change_presence(activity=discord.Game(name=cfg["status"]))NEWLINENEWLINEif __name__ == 
"__main__":NEWLINE bot.run(cfg["secrets"]["discord"]["token"])
"""Handle sound for a given maze"""NEWLINENEWLINEimport pygameNEWLINEimport pygame.mixerNEWLINENEWLINEfrom ..designpattern import event, observerNEWLINEfrom ..model import events, mazeNEWLINEfrom . import entity_soundNEWLINEfrom . import load_soundNEWLINENEWLINENEWLINE# TODO: Adjust volume of different sounds ?NEWLINEclass MazeSound(observer.Observer):NEWLINE """Handle all the sounds of the maze"""NEWLINENEWLINE solved = "MazeSolved.wav"NEWLINE failed = "MazeFailed.wav"NEWLINE extra_game = "ExtraGame.wav"NEWLINE hurry_up = "HurryUp.wav"NEWLINE extra_life = "ExtraLife.wav"NEWLINENEWLINE def __init__(self, maze_: maze.Maze) -> None:NEWLINE """ConstructorNEWLINENEWLINE Args:NEWLINE maze_ (maze.Maze): The maze to representNEWLINE """NEWLINE super().__init__()NEWLINENEWLINE self.maze = maze_NEWLINE self.maze.add_observer(self)NEWLINE self.running = FalseNEWLINE self.failed_sound = load_sound(self.failed)NEWLINE self.solved_sound = load_sound(self.solved)NEWLINE self.extra_game_sound = load_sound(self.extra_game)NEWLINE self.hurry_up_sound = load_sound(self.hurry_up)NEWLINE self.extra_life_sound = load_sound(self.extra_life)NEWLINENEWLINE # Set of all the views for each component of the mazeNEWLINE self.entity_sounds = {entity_sound.EntitySound.from_entity(entity_) for entity_ in self.maze.entities}NEWLINENEWLINE # Start the music if loadedNEWLINE try:NEWLINE pygame.mixer.music.play(-1)NEWLINE except pygame.error:NEWLINE pass # If not loadedNEWLINENEWLINE def notify(self, event_: event.Event) -> None:NEWLINE if isinstance(event_, events.NewEntityEvent):NEWLINE self.entity_sounds.add(entity_sound.EntitySound.from_entity(event_.entity))NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.RemovedEntityEvent):NEWLINE for sound in self.entity_sounds:NEWLINE if sound.entity == event_.entity:NEWLINE self.entity_sounds.remove(sound)NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.MazeFailedEvent):NEWLINE pygame.mixer.music.stop()NEWLINE 
self.failed_sound.play()NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.MazeSolvedEvent):NEWLINE pygame.mixer.music.stop()NEWLINE self.solved_sound.play()NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.ExtraGameEvent):NEWLINE self.extra_game_sound.play()NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.HurryUpEvent):NEWLINE self.hurry_up_sound.play()NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.ExtraLifeEvent):NEWLINE self.extra_life_sound.play()NEWLINE returnNEWLINE
#!/usr/bin/pythonNEWLINE# -*- coding: utf-8 -*-NEWLINE#NEWLINE# Copyright (c) 2016 Red Hat, Inc.NEWLINE#NEWLINE# This file is part of AnsibleNEWLINE#NEWLINE# Ansible is free software: you can redistribute it and/or modifyNEWLINE# it under the terms of the GNU General Public License as published byNEWLINE# the Free Software Foundation, either version 3 of the License, orNEWLINE# (at your option) any later version.NEWLINE#NEWLINE# Ansible is distributed in the hope that it will be useful,NEWLINE# but WITHOUT ANY WARRANTY; without even the implied warranty ofNEWLINE# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See theNEWLINE# GNU General Public License for more details.NEWLINE#NEWLINE# You should have received a copy of the GNU General Public LicenseNEWLINE# along with Ansible. If not, see <http://www.gnu.org/licenses/>.NEWLINE#NEWLINENEWLINEANSIBLE_METADATA = {'metadata_version': '1.1',NEWLINE 'status': ['preview'],NEWLINE 'supported_by': 'community'}NEWLINENEWLINENEWLINEDOCUMENTATION = '''NEWLINE---NEWLINEmodule: ovirt_external_provider_factsNEWLINEshort_description: Retrieve facts about one or more oVirt/RHV external providersNEWLINEauthor: "Ondra Machacek (@machacekondra)"NEWLINEversion_added: "2.3"NEWLINEdescription:NEWLINE - "Retrieve facts about one or more oVirt/RHV external providers."NEWLINEnotes:NEWLINE - "This module creates a new top-level C(ovirt_external_providers) fact, whichNEWLINE contains a list of external_providers."NEWLINEoptions:NEWLINE type:NEWLINE description:NEWLINE - "Type of the external provider."NEWLINE choices: ['os_image', 'os_network', 'os_volume', 'foreman']NEWLINE required: trueNEWLINE name:NEWLINE description:NEWLINE - "Name of the external provider, can be used as glob expression."NEWLINEextends_documentation_fragment: ovirt_factsNEWLINE'''NEWLINENEWLINEEXAMPLES = '''NEWLINE# Examples don't contain auth parameter for simplicity,NEWLINE# look at ovirt_auth module to see how to reuse authentication:NEWLINENEWLINE# Gather 
facts about all image external providers named C<glance>:NEWLINE- ovirt_external_provider_facts:NEWLINE type: os_imageNEWLINE name: glanceNEWLINE- debug:NEWLINE var: ovirt_external_providersNEWLINE'''NEWLINENEWLINERETURN = '''NEWLINEexternal_host_providers:NEWLINE description: "List of dictionaries of all the external_host_provider attributes. External provider attributes can be found on your oVirt/RHV instanceNEWLINE at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/external_host_provider."NEWLINE returned: "On success and if parameter 'type: foreman' is used."NEWLINE type: listNEWLINEopenstack_image_providers:NEWLINE description: "List of dictionaries of all the openstack_image_provider attributes. External provider attributes can be found on your oVirt/RHV instanceNEWLINE at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_image_provider."NEWLINE returned: "On success and if parameter 'type: os_image' is used."NEWLINE type: listNEWLINEopenstack_volume_providers:NEWLINE description: "List of dictionaries of all the openstack_volume_provider attributes. External provider attributes can be found on your oVirt/RHV instanceNEWLINE at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_volume_provider."NEWLINE returned: "On success and if parameter 'type: os_volume' is used."NEWLINE type: listNEWLINEopenstack_network_providers:NEWLINE description: "List of dictionaries of all the openstack_network_provider attributes. 
External provider attributes can be found on your oVirt/RHV instanceNEWLINE at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/openstack_network_provider."NEWLINE returned: "On success and if parameter 'type: os_network' is used."NEWLINE type: listNEWLINE'''NEWLINENEWLINEimport fnmatchNEWLINEimport tracebackNEWLINENEWLINEfrom ansible.module_utils.basic import AnsibleModuleNEWLINEfrom ansible.module_utils.ovirt import (NEWLINE check_sdk,NEWLINE create_connection,NEWLINE get_dict_of_struct,NEWLINE ovirt_facts_full_argument_spec,NEWLINE)NEWLINENEWLINENEWLINEdef _external_provider_service(provider_type, system_service):NEWLINE if provider_type == 'os_image':NEWLINE return system_service.openstack_image_providers_service()NEWLINE elif provider_type == 'os_network':NEWLINE return system_service.openstack_network_providers_service()NEWLINE elif provider_type == 'os_volume':NEWLINE return system_service.openstack_volume_providers_service()NEWLINE elif provider_type == 'foreman':NEWLINE return system_service.external_host_providers_service()NEWLINENEWLINENEWLINEdef main():NEWLINE argument_spec = ovirt_facts_full_argument_spec(NEWLINE name=dict(default=None, required=False),NEWLINE type=dict(NEWLINE default=None,NEWLINE required=True,NEWLINE choices=[NEWLINE 'os_image', 'os_network', 'os_volume', 'foreman',NEWLINE ],NEWLINE aliases=['provider'],NEWLINE ),NEWLINE )NEWLINE module = AnsibleModule(argument_spec)NEWLINENEWLINE if module._name == 'ovirt_external_providers_facts':NEWLINE module.deprecate("The 'ovirt_external_providers_facts' module is being renamed 'ovirt_external_provider_facts'", version=2.8)NEWLINENEWLINE check_sdk(module)NEWLINENEWLINE try:NEWLINE auth = module.params.pop('auth')NEWLINE connection = create_connection(auth)NEWLINE external_providers_service = _external_provider_service(NEWLINE provider_type=module.params.pop('type'),NEWLINE system_service=connection.system_service(),NEWLINE )NEWLINE if module.params['name']:NEWLINE 
external_providers = [NEWLINE e for e in external_providers_service.list()NEWLINE if fnmatch.fnmatch(e.name, module.params['name'])NEWLINE ]NEWLINE else:NEWLINE external_providers = external_providers_service.list()NEWLINENEWLINE module.exit_json(NEWLINE changed=False,NEWLINE ansible_facts=dict(NEWLINE ovirt_external_providers=[NEWLINE get_dict_of_struct(NEWLINE struct=c,NEWLINE connection=connection,NEWLINE fetch_nested=module.params.get('fetch_nested'),NEWLINE attributes=module.params.get('nested_attributes'),NEWLINE ) for c in external_providersNEWLINE ],NEWLINE ),NEWLINE )NEWLINE except Exception as e:NEWLINE module.fail_json(msg=str(e), exception=traceback.format_exc())NEWLINE finally:NEWLINE connection.close(logout=auth.get('token') is None)NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE main()NEWLINE
import argparseNEWLINEimport templateNEWLINENEWLINEparser = argparse.ArgumentParser(description='EDSR and MDSR')NEWLINENEWLINEparser.add_argument('--debug', action='store_true',NEWLINE help='Enables debug mode')NEWLINEparser.add_argument('--template', default='.',NEWLINE help='You can set various templates in option.py')NEWLINENEWLINE# Hardware specificationsNEWLINEparser.add_argument('--n_threads', type=int, default=6,NEWLINE help='number of threads for data loading')NEWLINEparser.add_argument('--cpu', action='store_true',NEWLINE help='use cpu only')NEWLINEparser.add_argument('--n_GPUs', type=int, default=1,NEWLINE help='number of GPUs')NEWLINEparser.add_argument('--seed', type=int, default=1,NEWLINE help='random seed')NEWLINENEWLINE# Data specificationsNEWLINEparser.add_argument('--dir_data', type=str, default='../../../dataset',NEWLINE help='dataset directory')NEWLINEparser.add_argument('--dir_demo', type=str, default='../test',NEWLINE help='demo image directory')NEWLINEparser.add_argument('--data_train', type=str, default='DIV2K',NEWLINE help='train dataset name')NEWLINEparser.add_argument('--data_test', type=str, default='DIV2K',NEWLINE help='test dataset name')NEWLINEparser.add_argument('--data_range', type=str, default='1-800/801-810',NEWLINE help='train/test data range')NEWLINEparser.add_argument('--ext', type=str, default='sep',NEWLINE help='dataset file extension')NEWLINEparser.add_argument('--scale', type=str, default='4',NEWLINE help='super resolution scale')NEWLINEparser.add_argument('--patch_size', type=int, default=192,NEWLINE help='output patch size')NEWLINEparser.add_argument('--rgb_range', type=int, default=255,NEWLINE help='maximum value of RGB')NEWLINEparser.add_argument('--n_colors', type=int, default=3,NEWLINE help='number of color channels to use')NEWLINEparser.add_argument('--chop', action='store_true',NEWLINE help='enable memory-efficient forward')NEWLINEparser.add_argument('--no_augment', action='store_true',NEWLINE help='do not use data 
augmentation')NEWLINENEWLINE# Inference time data augmentationNEWLINEparser.add_argument('--chop-size', type=int, nargs='+', default=[400],NEWLINE help='patch size at inference time')NEWLINEparser.add_argument('--shave-size', type=int, nargs='+', default=[10],NEWLINE help='shave size at inference time')NEWLINEparser.add_argument('--self_ensemble', action='store_true',NEWLINE help='use self-ensemble method for test')NEWLINEparser.add_argument('--guided_filtering', action='store_true',NEWLINE help='use guided image filter for test')NEWLINEparser.add_argument('--guided-radius', type=int, default=10,NEWLINE help='guided filter radius')NEWLINEparser.add_argument('--guided-eps', type=int, default=10,NEWLINE help='guided filter eps')NEWLINEparser.add_argument('--guided-type', type=str, default='RGB',NEWLINE help='guided image type')NEWLINE# Model specificationsNEWLINEparser.add_argument('--model', default='EDSR',NEWLINE help='model name')NEWLINENEWLINEparser.add_argument('--act', type=str, default='relu',NEWLINE help='activation function')NEWLINEparser.add_argument('--pre_train', type=str, default='',NEWLINE help='pre-trained model directory')NEWLINEparser.add_argument('--pre_optimizer', type=str, default='',NEWLINE help='pre-trained optimizer directory')NEWLINEparser.add_argument('--extend', type=str, default='.',NEWLINE help='pre-trained model directory')NEWLINEparser.add_argument('--n_resblocks', type=int, default=16,NEWLINE help='number of residual blocks')NEWLINEparser.add_argument('--n_feats', type=int, default=64,NEWLINE help='number of feature maps')NEWLINEparser.add_argument('--res_scale', type=float, default=1,NEWLINE help='residual scaling')NEWLINEparser.add_argument('--shift_mean', default=True,NEWLINE help='subtract pixel mean from the input')NEWLINEparser.add_argument('--dilation', action='store_true',NEWLINE help='use dilated convolution')NEWLINEparser.add_argument('--precision', type=str, default='single',NEWLINE choices=('single', 'half'),NEWLINE help='FP 
precision for test (single | half)')NEWLINENEWLINE# Option for Residual dense network (RDN)NEWLINEparser.add_argument('--G0', type=int, default=64,NEWLINE help='default number of filters. (Use in RDN)')NEWLINEparser.add_argument('--RDNkSize', type=int, default=3,NEWLINE help='default kernel size. (Use in RDN)')NEWLINEparser.add_argument('--RDNconfig', type=str, default='B',NEWLINE help='parameters config of RDN. (Use in RDN)')NEWLINENEWLINE# Option for Residual channel attention network (RCAN)NEWLINEparser.add_argument('--n_resgroups', type=int, default=10,NEWLINE help='number of residual groups')NEWLINEparser.add_argument('--reduction', type=int, default=16,NEWLINE help='number of feature maps reduction')NEWLINENEWLINE# Training specificationsNEWLINEparser.add_argument('--reset', action='store_true',NEWLINE help='reset the training')NEWLINEparser.add_argument('--test_every', type=int, default=1000,NEWLINE help='do test per every N batches')NEWLINEparser.add_argument('--test_epoch', type=int, default=0,NEWLINE help='do test per every N epochs, default is 0, training without testing')NEWLINEparser.add_argument('--load_log', type=str, default='loss',NEWLINE help='load log file to cal epoch number')NEWLINEparser.add_argument('--epochs', type=int, default=300,NEWLINE help='number of epochs to train')NEWLINEparser.add_argument('--batch_size', type=int, default=16,NEWLINE help='input batch size for training')NEWLINEparser.add_argument('--split_batch', type=int, default=1,NEWLINE help='split the batch into smaller chunks')NEWLINEparser.add_argument('--test_only', action='store_true',NEWLINE help='set this option to test the model')NEWLINEparser.add_argument('--gan_k', type=int, default=1,NEWLINE help='k value for adversarial loss')NEWLINENEWLINE# Optimization specificationsNEWLINEparser.add_argument('--lr', type=float, default=1e-4,NEWLINE help='learning rate')NEWLINEparser.add_argument('--decay', type=str, default='200',NEWLINE help='learning rate decay 
type')NEWLINEparser.add_argument('--gamma', type=float, default=0.5,NEWLINE help='learning rate decay factor for step decay')NEWLINEparser.add_argument('--optimizer', default='ADAM',NEWLINE choices=('SGD', 'ADAM', 'RMSprop'),NEWLINE help='optimizer to use (SGD | ADAM | RMSprop)')NEWLINEparser.add_argument('--momentum', type=float, default=0.9,NEWLINE help='SGD momentum')NEWLINEparser.add_argument('--betas', type=tuple, default=(0.9, 0.999),NEWLINE help='ADAM beta')NEWLINEparser.add_argument('--epsilon', type=float, default=1e-8,NEWLINE help='ADAM epsilon for numerical stability')NEWLINEparser.add_argument('--weight_decay', type=float, default=0,NEWLINE help='weight decay')NEWLINEparser.add_argument('--gclip', type=float, default=0,NEWLINE help='gradient clipping threshold (0 = no clipping)')NEWLINEparser.add_argument('--deep-supervision', default=False,NEWLINE help='if using deep supervision')NEWLINEparser.add_argument('--deep-supervision-factor', type=float, default=0.2,NEWLINE help='deep supervision factor')NEWLINE# Loss specificationsNEWLINEparser.add_argument('--loss', type=str, default='1*L1',NEWLINE help='loss function configuration')NEWLINEparser.add_argument('--skip_threshold', type=float, default='1e8',NEWLINE help='skipping batch that has large error')NEWLINEparser.add_argument('--l1-clip-min', type=float, default=0.0,NEWLINE help='torch.clamp(||sr-hr||, min, max)')NEWLINEparser.add_argument('--l1-clip-max', type=float, default=10.0,NEWLINE help='torch.clamp(||sr-hr||, min, max)')NEWLINENEWLINE# Log specificationsNEWLINEparser.add_argument('--save', type=str, default='test',NEWLINE help='file name to save')NEWLINEparser.add_argument('--load', type=str, default='',NEWLINE help='file name to load')NEWLINEparser.add_argument('--resume', type=int, default=0,NEWLINE help='resume from specific checkpoint')NEWLINEparser.add_argument('--save_models', action='store_true',NEWLINE help='save all intermediate models')NEWLINEparser.add_argument('--print_every', 
type=int, default=100,NEWLINE help='how many batches to wait before logging training status')NEWLINEparser.add_argument('--save_results', action='store_true',NEWLINE help='save output results')NEWLINEparser.add_argument('--save_gt', action='store_true',NEWLINE help='save low-resolution and high-resolution images together')NEWLINENEWLINEargs = parser.parse_args()NEWLINEtemplate.set_template(args)NEWLINENEWLINEargs.scale = list(map(lambda x: int(x), args.scale.split('+')))NEWLINEargs.data_train = args.data_train.split('+')NEWLINEargs.data_test = args.data_test.split('+')NEWLINENEWLINEif args.epochs == 0:NEWLINE args.epochs = 1e8NEWLINENEWLINEfor arg in vars(args):NEWLINE if vars(args)[arg] == 'True':NEWLINE vars(args)[arg] = TrueNEWLINE elif vars(args)[arg] == 'False':NEWLINE vars(args)[arg] = FalseNEWLINENEWLINE
# Copyright 2020, The TensorFlow Authors.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINE# Lint as: python3NEWLINE"""An example for using keras_evaluation."""NEWLINENEWLINEfrom absl import appNEWLINEfrom absl import flagsNEWLINENEWLINEimport numpy as npNEWLINEimport tensorflow as tfNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.data_structures import AttackTypeNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.data_structures import get_flattened_attack_metricsNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.data_structures import SlicingSpecNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.keras_evaluation import MembershipInferenceCallbackNEWLINEfrom tensorflow_privacy.privacy.privacy_tests.membership_inference_attack.keras_evaluation import run_attack_on_keras_modelNEWLINENEWLINENEWLINEFLAGS = flags.FLAGSNEWLINEflags.DEFINE_float('learning_rate', 0.02, 'Learning rate for training')NEWLINEflags.DEFINE_integer('batch_size', 250, 'Batch size')NEWLINEflags.DEFINE_integer('epochs', 100, 'Number of epochs')NEWLINEflags.DEFINE_string('model_dir', None, 'Model directory.')NEWLINEflags.DEFINE_bool('tensorboard_merge_classifiers', False, 'If true, plot 'NEWLINE 'different classifiers with the same slicing_spec and metric 'NEWLINE 'in the same figure.')NEWLINENEWLINENEWLINEdef 
small_cnn():NEWLINE """Setup a small CNN for image classification."""NEWLINE model = tf.keras.models.Sequential()NEWLINE model.add(tf.keras.layers.Input(shape=(32, 32, 3)))NEWLINENEWLINE for _ in range(3):NEWLINE model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='relu'))NEWLINE model.add(tf.keras.layers.MaxPooling2D())NEWLINENEWLINE model.add(tf.keras.layers.Flatten())NEWLINE model.add(tf.keras.layers.Dense(64, activation='relu'))NEWLINE model.add(tf.keras.layers.Dense(10))NEWLINE return modelNEWLINENEWLINENEWLINEdef load_cifar10():NEWLINE """Loads CIFAR10 data."""NEWLINE (x_train, y_train), (x_test, y_test) = tf.keras.datasets.cifar10.load_data()NEWLINENEWLINE x_train = np.array(x_train, dtype=np.float32) / 255NEWLINE x_test = np.array(x_test, dtype=np.float32) / 255NEWLINENEWLINE y_train = np.array(y_train, dtype=np.int32).squeeze()NEWLINE y_test = np.array(y_test, dtype=np.int32).squeeze()NEWLINENEWLINE return x_train, y_train, x_test, y_testNEWLINENEWLINENEWLINEdef main(unused_argv):NEWLINE # Load training and test data.NEWLINE x_train, y_train, x_test, y_test = load_cifar10()NEWLINENEWLINE # Get model, optimizer and specify loss.NEWLINE model = small_cnn()NEWLINE optimizer = tf.keras.optimizers.SGD(lr=FLAGS.learning_rate, momentum=0.9)NEWLINE loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)NEWLINE model.compile(optimizer=optimizer, loss=loss, metrics=['accuracy'])NEWLINENEWLINE # Get callback for membership inference attack.NEWLINE mia_callback = MembershipInferenceCallback(NEWLINE (x_train, y_train),NEWLINE (x_test, y_test),NEWLINE slicing_spec=SlicingSpec(entire_dataset=True, by_class=True),NEWLINE attack_types=[AttackType.THRESHOLD_ATTACK,NEWLINE AttackType.K_NEAREST_NEIGHBORS],NEWLINE tensorboard_dir=FLAGS.model_dir,NEWLINE tensorboard_merge_classifiers=FLAGS.tensorboard_merge_classifiers)NEWLINENEWLINE # Train model with KerasNEWLINE model.fit(NEWLINE x_train,NEWLINE y_train,NEWLINE epochs=FLAGS.epochs,NEWLINE 
validation_data=(x_test, y_test),NEWLINE batch_size=FLAGS.batch_size,NEWLINE callbacks=[mia_callback],NEWLINE verbose=2)NEWLINENEWLINE print('End of training attack:')NEWLINE attack_results = run_attack_on_keras_model(NEWLINE model, (x_train, y_train), (x_test, y_test),NEWLINE slicing_spec=SlicingSpec(entire_dataset=True, by_class=True),NEWLINE attack_types=[NEWLINE AttackType.THRESHOLD_ATTACK, AttackType.K_NEAREST_NEIGHBORSNEWLINE ])NEWLINE att_types, att_slices, att_metrics, att_values = get_flattened_attack_metrics(NEWLINE attack_results)NEWLINE print('\n'.join([' %s: %.4f' % (', '.join([s, t, m]), v) for t, s, m, v inNEWLINE zip(att_types, att_slices, att_metrics, att_values)]))NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE app.run(main)NEWLINE
"""Print the current date's year, month, and day, one value per line."""
import datetime

_today = datetime.date.today()
for _part in (_today.year, _today.month, _today.day):
    print(_part)
from selenium.webdriver.common.keys import Keys
import time
import random

from selenium_ui.base_page import BasePage
from selenium_ui.jira.pages.selectors import UrlManager, LoginPageLocators, DashboardLocators, PopupLocators, \
    IssueLocators, ProjectLocators, SearchLocators, BoardsListLocators, BoardLocators, LogoutLocators


class PopupManager(BasePage):
    """Helper page object that dismisses Jira's transient popups."""

    def dismiss_default_popup(self):
        # Delegates to BasePage.dismiss_popup with the three known popup selectors.
        return self.dismiss_popup(PopupLocators.default_popup, PopupLocators.popup_1, PopupLocators.popup_2)


class Login(BasePage):
    """Jira login page: credential entry plus the first-login onboarding flow."""

    page_url = LoginPageLocators.login_url
    page_loaded_selector = LoginPageLocators.system_dashboard

    def is_first_login(self):
        """Return True when the onboarding 'continue' button is present."""
        return True if self.get_elements(LoginPageLocators.continue_button) else False

    def first_login_setup(self):
        """Click through the first-login wizard, then land on the dashboard."""
        # ESC first to clear any overlay that could swallow the click.
        self.wait_until_visible(LoginPageLocators.continue_button).send_keys(Keys.ESCAPE)
        self.get_element(LoginPageLocators.continue_button).click()
        self.wait_until_visible(LoginPageLocators.avatar_page_next_button).click()
        self.wait_until_visible(LoginPageLocators.explore_current_projects).click()
        self.go_to_url(DashboardLocators.dashboard_url)
        self.wait_until_visible(DashboardLocators.dashboard_window)

    def set_credentials(self, username, password):
        """Type the given credentials and submit the login form."""
        self.get_element(LoginPageLocators.login_field).send_keys(username)
        self.get_element(LoginPageLocators.password_field).send_keys(password)
        self.get_element(LoginPageLocators.login_submit_button).click()


class Logout(BasePage):
    """Jira logout confirmation page."""

    page_url = LogoutLocators.logout_url

    def click_logout(self):
        self.get_element(LogoutLocators.logout_submit_button).click()

    def wait_for_page_loaded(self):
        # Logged out successfully once the login link is back in the DOM.
        self.wait_until_present(LogoutLocators.login_button_link)


class Dashboard(BasePage):
    """Jira system dashboard."""

    page_url = DashboardLocators.dashboard_url

    def wait_dashboard_presented(self):
        self.wait_until_present(DashboardLocators.dashboard_window)


class Issue(BasePage):
    """Issue view/create/edit page object.

    Depending on the constructor arguments it can address the issue view
    (by issue_key) or the edit-issue / edit-comment pages (by issue_id).
    """

    page_loaded_selector = IssueLocators.issue_title

    def __init__(self, driver, issue_key=None, issue_id=None):
        BasePage.__init__(self, driver)
        # Two URL managers: one keyed by issue key (view/modal), one by
        # numeric issue id (edit pages).
        url_manager_modal = UrlManager(issue_key=issue_key)
        url_manager_edit_page = UrlManager(issue_id=issue_id)
        self.page_url = url_manager_modal.issue_url()
        self.page_url_edit_issue = url_manager_edit_page.edit_issue_url()
        self.page_url_edit_comment = url_manager_edit_page.edit_comments_url()

    def wait_for_issue_title(self):
        self.wait_until_visible(IssueLocators.issue_title)

    def go_to_edit_issue(self):
        self.go_to_url(self.page_url_edit_issue)
        self.wait_until_visible(IssueLocators.edit_issue_page)

    def go_to_edit_comment(self):
        self.go_to_url(self.page_url_edit_comment)
        self.wait_until_visible(IssueLocators.edit_comment_add_comment_button)

    def fill_summary_edit(self):
        """Append a randomized summary string on the edit-issue form."""
        text_summary = f"Edit summary form selenium - {self.generate_random_string(10)}"
        self.get_element(IssueLocators.issue_summary_field).send_keys(text_summary)

    def __fill_rich_editor_textfield(self, text, selector):
        # The rich editor lives in an iframe: switch in, type, switch back.
        self.wait_until_available_to_switch(selector)
        self.get_element(IssueLocators.tinymce_description_field).send_keys(text)
        self.return_to_parent_frame()

    def edit_issue_submit(self):
        self.get_element(IssueLocators.edit_issue_submit).click()

    def fill_description_edit(self):
        text_description = f"Edit description form selenium - {self.generate_random_string(30)}"
        self.__fill_rich_editor_textfield(text_description, selector=IssueLocators.issue_description_field)

    def open_create_issue_modal(self):
        self.wait_until_clickable(IssueLocators.create_issue_button).click()
        self.wait_until_visible(IssueLocators.issue_modal)

    def fill_description_create(self):
        text_description = f'Description: {self.generate_random_string(100)}'
        self.__fill_rich_editor_textfield(text_description, selector=IssueLocators.issue_description_field)

    def fill_summary_create(self):
        # Timestamp keeps created-issue summaries unique across runs.
        summary = f"Issue created date {time.time()}"
        self.wait_until_clickable(IssueLocators.issue_summary_field).send_keys(summary)

    def assign_to_me(self):
        # Best-effort: the link may be absent, in which case the loop is a no-op.
        assign_to_me_links = self.get_elements(IssueLocators.issue_assign_to_me_link)
        for link in assign_to_me_links:
            link.click()

    def set_resolution(self):
        """Pick a random non-empty option from the resolution dropdown, if shown."""
        resolution_field = self.get_elements(IssueLocators.issue_resolution_field)
        if resolution_field:
            drop_down_length = len(self.select(resolution_field[0]).options)
            # Index 0 is skipped -- presumably the empty/placeholder option;
            # TODO confirm against the rendered <select>.
            random_resolution_id = random.randint(1, drop_down_length - 1)
            self.select(resolution_field[0]).select_by_index(random_resolution_id)

    def set_issue_type(self):
        """Choose a random non-Epic issue type from the type dropdown."""
        # NOTE(review): '__filer_epic' looks like a typo for '__filter_epic'
        # (kept as-is; renaming is out of scope for a doc pass).
        def __filer_epic(element):
            return "epic" not in element.get_attribute("class").lower()

        self.get_element(IssueLocators.issue_type_field).click()
        issue_dropdown_elements = self.get_elements(IssueLocators.issue_type_dropdown_elements)
        if issue_dropdown_elements:
            filtered_issue_elements = list(filter(__filer_epic, issue_dropdown_elements))
            rnd_issue_type_el = random.choice(filtered_issue_elements)
            self.action_chains().move_to_element(rnd_issue_type_el).click(rnd_issue_type_el).perform()
        self.wait_until_invisible(IssueLocators.issue_ready_to_save_spinner)

    def submit_issue(self):
        self.wait_until_clickable(IssueLocators.issue_submit_button).click()
        self.wait_until_invisible(IssueLocators.issue_modal)

    def fill_comment_edit(self):
        text = 'Comment from selenium'
        self.__fill_rich_editor_textfield(text, selector=IssueLocators.edit_comment_text_field)

    def edit_comment_submit(self):
        self.get_element(IssueLocators.edit_comment_add_comment_button).click()
        self.wait_until_visible(IssueLocators.issue_title)


class Project(BasePage):
    """Project summary page, addressed by project key."""

    page_loaded_selector = ProjectLocators.project_summary_property_column

    def __init__(self, driver, project_key):
        BasePage.__init__(self, driver)
        url_manager = UrlManager(project_key=project_key)
        self.page_url = url_manager.project_summary_url()


class ProjectsList(BasePage):
    """A random page of the paginated projects list."""

    def __init__(self, driver, projects_list_pages):
        BasePage.__init__(self, driver)
        # Pick a random page in [1, projects_list_pages] to spread load.
        self.projects_list_page = random.randint(1, projects_list_pages)
        url_manager = UrlManager(projects_list_page=self.projects_list_page)
        self.page_url = url_manager.projects_list_page_url()

    def wait_for_page_loaded(self):
        # Either the list or the explicit "not found" state counts as loaded.
        self.wait_until_any_ec_presented(
            selector_names=[ProjectLocators.projects_list, ProjectLocators.projects_not_found])


class BoardsList(BasePage):
    """The boards list page."""

    page_url = BoardsListLocators.boards_list_url
    page_loaded_selector = BoardsListLocators.boards_list


class Search(BasePage):
    """Issue search results for an arbitrary JQL query."""

    def __init__(self, driver, jql):
        BasePage.__init__(self, driver)
        url_manager = UrlManager(jql=jql)
        self.page_url = url_manager.jql_search_url()

    def wait_for_page_loaded(self):
        # Loaded when any of table view, detail view, or "no issues" appears.
        self.wait_until_any_ec_presented(selector_names=[SearchLocators.search_issue_table,
                                                         SearchLocators.search_issue_content,
                                                         SearchLocators.search_no_issue_found])


class Board(BasePage):
    """A scrum board and its backlog view, addressed by board id."""

    page_loaded_selector = BoardLocators.board_columns

    def __init__(self, driver, board_id):
        BasePage.__init__(self, driver)
        url_manager = UrlManager(board_id=board_id)
        self.page_url = url_manager.scrum_board_url()
        self.backlog_url = url_manager.scrum_board_backlog_url()

    def go_to_backlog(self):
        self.go_to_url(self.backlog_url)

    def wait_for_scrum_board_backlog(self):
        self.wait_until_present(BoardLocators.scrum_board_backlog_content)
import numpy as np
from gym.envs.registration import register

from onpolicy.envs.highway.highway_env import utils
from onpolicy.envs.highway.highway_env.envs.common.abstract import AbstractEnv
from onpolicy.envs.highway.highway_env.road.lane import LineType, StraightLane
from onpolicy.envs.highway.highway_env.road.road import Road, RoadNetwork
# NOTE(review): MDPVehicle is imported but not referenced in this module.
from onpolicy.envs.highway.highway_env.vehicle.controller import MDPVehicle


class TwoWayEnv(AbstractEnv):

    """
    A risk management task: the agent is driving on a two-way lane with incoming traffic.

    It must balance making progress by overtaking and ensuring safety.

    These conflicting objectives are implemented by a reward signal and a constraint signal,
    in the CMDP/BMDP framework.
    """

    # Reward/constraint coefficients.
    # NOTE(review): COLLISION_REWARD is defined but unused in _reward below;
    # collisions only enter through _cost and episode termination.
    COLLISION_REWARD: float = 0
    LEFT_LANE_CONSTRAINT: float = 1
    LEFT_LANE_REWARD: float = 0.2
    HIGH_SPEED_REWARD: float = 0.8

    @classmethod
    def default_config(cls) -> dict:
        """Extend the base config with a TTC observation and discrete meta-actions."""
        config = super().default_config()
        config.update({
            "observation": {
                "type": "TimeToCollision",
                "horizon": 5
            },
            "action": {
                "type": "DiscreteMetaAction",
            },
        })
        return config

    def _reward(self, action: int) -> float:
        """
        The vehicle is rewarded for driving with high speed
        :param action: the action performed
        :return: the reward of the state-action transition
        """
        neighbours = self.road.network.all_side_lanes(self.vehicle.lane_index)

        # Two terms: speed (normalized speed index) plus a lane term that is
        # maximal when the target lane index is 0 -- presumably the overtaking
        # lane; confirm against the lane ordering in _make_road.
        reward = self.HIGH_SPEED_REWARD * self.vehicle.speed_index / (self.vehicle.SPEED_COUNT - 1) \
            + self.LEFT_LANE_REWARD * (len(neighbours) - 1 - self.vehicle.target_lane_index[2]) / (len(neighbours) - 1)
        return reward

    def _is_terminal(self) -> bool:
        """The episode is over if the ego vehicle crashed or the time is out."""
        # Time-out termination is handled elsewhere (max_episode_steps below).
        return self.vehicle.crashed

    def _cost(self, action: int) -> float:
        """The constraint signal is the time spent driving on the opposite lane, and occurrence of collisions."""
        return float(self.vehicle.crashed) + float(self.vehicle.lane_index[2] == 0)/15

    def _reset(self) -> None:
        # Build a fresh road and repopulate vehicles; nothing is returned.
        self._make_road()
        self._make_vehicles()

    def _make_road(self, length=800):
        """
        Make a road composed of a two-way road.

        :param length: road length in meters
        :return: the road
        """
        net = RoadNetwork()

        # Lanes: two "a"->"b" lanes for the ego direction and one "b"->"a"
        # lane carrying the oncoming traffic.
        net.add_lane("a", "b", StraightLane([0, 0], [length, 0],
                                           line_types=(LineType.CONTINUOUS_LINE, LineType.STRIPED)))
        net.add_lane("a", "b", StraightLane([0, StraightLane.DEFAULT_WIDTH], [length, StraightLane.DEFAULT_WIDTH],
                                           line_types=(LineType.NONE, LineType.CONTINUOUS_LINE)))
        net.add_lane("b", "a", StraightLane([length, 0], [0, 0],
                                           line_types=(LineType.NONE, LineType.NONE)))

        road = Road(network=net, np_random=self.np_random, record_history=self.config["show_trajectories"])
        self.road = road

    def _make_vehicles(self) -> None:
        """
        Populate a road with several vehicles on the road

        :return: the ego-vehicle
        """
        road = self.road
        # Ego vehicle starts 30 m into lane ("a", "b", 1) at 30 m/s.
        ego_vehicle = self.action_type.vehicle_class(road,
                                                     road.network.get_lane(("a", "b", 1)).position(30, 0),
                                                     speed=30)
        road.vehicles.append(ego_vehicle)
        self.vehicle = ego_vehicle

        vehicles_type = utils.class_from_path(self.config["npc_vehicles_type"])
        # Three slower vehicles ahead in the ego lane (positions/speeds jittered).
        for i in range(3):
            self.road.vehicles.append(
                vehicles_type(road,
                              position=road.network.get_lane(("a", "b", 1))
                              .position(70+40*i + 10*self.np_random.randn(), 0),
                              heading=road.network.get_lane(("a", "b", 1)).heading_at(70+40*i),
                              speed=24 + 2*self.np_random.randn(),
                              enable_lane_change=False)
            )
        # Two oncoming vehicles in the opposite lane.
        for i in range(2):
            v = vehicles_type(road,
                              position=road.network.get_lane(("b", "a", 0))
                              .position(200+100*i + 10*self.np_random.randn(), 0),
                              heading=road.network.get_lane(("b", "a", 0)).heading_at(200+100*i),
                              speed=20 + 5*self.np_random.randn(),
                              enable_lane_change=False)
            v.target_lane_index = ("b", "a", 0)
            self.road.vehicles.append(v)


# NOTE(review): entry_point references 'highway_env.envs' while this class
# lives under onpolicy.envs.highway...; verify gym can resolve this path.
register(
    id='two-way-v0',
    entry_point='highway_env.envs:TwoWayEnv',
    max_episode_steps=15
)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 2018/12/24 10:05 AM
# @Author  : zhangzhen
# @Site    :
# @File    : torch_lstm_classification.py
# @Software: PyCharm
# LSTM classifier on MNIST: each 28x28 image is fed as a 28-step sequence
# of 28-dim rows.
# NOTE(review): uses the legacy torch.autograd.Variable / volatile=True API,
# removed in modern PyTorch; ds/transforms/plt imports are unused in live code.
import torch
import torchvision
from torch import nn
import torch.utils.data as Data
from torch.autograd import Variable
import torchvision.datasets as ds
import torchvision.transforms as transforms
import matplotlib.pyplot as plt

torch.manual_seed(1)    # reproducible

# hyper Parameters
EPOCH = 1          # train the full set this many times
BATCH_SIZE = 128
TIME_STEP = 28     # rows per image == LSTM time steps
INPUT_SIZE = 28    # pixels per row == LSTM input size
LR = 0.01
DOWNLOAD_MNIST = False
# Hard-coded local dataset path; adjust per machine.
ROOT = '/Users/zhangzhen/gitRepository/AnalyticsVidhya/data/mnist'
train_data = torchvision.datasets.MNIST(
    root=ROOT,
    train=True,
    transform=torchvision.transforms.ToTensor(),
    download=DOWNLOAD_MNIST,
)

# plt.imshow(train_data.train_data[0], cmap='gray')
# plt.show()
# Test set deliberately has no transform: tensors are scaled manually below.
test_data = torchvision.datasets.MNIST(
    root=ROOT,
    train=False,
)

# batch setting
train_loader = Data.DataLoader(
    dataset=train_data,
    batch_size=BATCH_SIZE,
    shuffle=True
)
# simplify data set: first 2000 test images, scaled from uint8 to [0, 1].
test_x = Variable(test_data.test_data, volatile=True).type(torch.FloatTensor)[: 2000]/255.
test_y = test_data.test_labels.numpy().squeeze()[: 2000]

print(test_x.shape, test_y.shape)


class RNN(nn.Module):
    """Two-layer LSTM followed by a linear read-out of the last time step."""

    def __init__(self):
        super(RNN, self).__init__()

        self.rnn = nn.LSTM(
            input_size=INPUT_SIZE,
            hidden_size=64,
            num_layers=2,
            batch_first=True,  # False -> (time_step, batch, input)   True -> (batch, time_step, input)
        )
        self.out = nn.Linear(64, 10)

    def forward(self, *input):
        # NOTE(review): *input shadows the builtin; only input[0] is used.
        r_out, h_state = self.rnn(input[0], None)  # x-> (batch, time_step, input_size)
        # Classify from the hidden state at the final time step only.
        out = self.out(r_out[:, -1, :])  # (batch, time_step, input)
        return out


if __name__ == '__main__':

    rnn = RNN()
    # print(rnn)
    optimizer = torch.optim.Adam(rnn.parameters(), lr=LR)
    loss_func = nn.CrossEntropyLoss()

    for epoch in range(EPOCH):
        for step, (x, y) in enumerate(train_loader):
            # Flatten each image batch to (batch, 28 time steps, 28 features).
            b_x = Variable(x.view(-1, 28, 28))
            b_y = Variable(y)
            output = rnn(b_x)
            loss = loss_func(output, b_y)

            optimizer.zero_grad()
            loss.backward()
            optimizer.step()

            # Periodically evaluate accuracy on the 2000-image test slice.
            if step % 50 == 0:
                test_out = rnn(test_x)
                pred_y = torch.max(test_out, 1)[1].data.numpy().squeeze()
                acc = sum(pred_y==test_y)/test_y.size
                print('Epoch:', epoch, '| train loss: %.4f' % loss.item(), 'test acc: %.4f' % acc)

    # print 10 predictions from test data
    test_output = rnn(test_x[:10])
    pred_y = torch.max(test_output, 1)[1].data.numpy().squeeze()
    print(pred_y, 'prediction number')
    print(test_y[:10], 'real number')
"""Builder class used to transform a mypy AST to the IR form.NEWLINENEWLINEThe IRBuilder class maintains transformation state and provides accessNEWLINEto various helpers used to implement the transform.NEWLINENEWLINEThe top-level transform control logic is in mypyc.irbuild.main.NEWLINENEWLINEmypyc.irbuild.visitor.IRBuilderVisitor is used to dispatch based on mypyNEWLINEAST node type to code that actually does the bulk of the work. ForNEWLINEexample, expressions are transformed in mypyc.irbuild.expression andNEWLINEfunctions are transformed in mypyc.irbuild.function.NEWLINE"""NEWLINENEWLINEfrom mypyc.irbuild.prepare import RegisterImplInfoNEWLINEfrom typing import Callable, Dict, List, Tuple, Optional, Union, Sequence, Set, AnyNEWLINEfrom typing_extensions import overloadNEWLINEfrom mypy.backports import OrderedDictNEWLINENEWLINEfrom mypy.build import GraphNEWLINEfrom mypy.nodes import (NEWLINE MypyFile, SymbolNode, Statement, OpExpr, IntExpr, NameExpr, LDEF, Var, UnaryExpr,NEWLINE CallExpr, IndexExpr, Expression, MemberExpr, RefExpr, Lvalue, TupleExpr,NEWLINE TypeInfo, Decorator, OverloadedFuncDef, StarExpr, ComparisonExpr, GDEF,NEWLINE ArgKind, ARG_POS, ARG_NAMED, FuncDef,NEWLINE)NEWLINEfrom mypy.types import (NEWLINE Type, Instance, TupleType, UninhabitedType, get_proper_typeNEWLINE)NEWLINEfrom mypy.maptype import map_instance_to_supertypeNEWLINEfrom mypy.visitor import ExpressionVisitor, StatementVisitorNEWLINEfrom mypy.util import split_targetNEWLINENEWLINEfrom mypyc.common import TEMP_ATTR_NAME, SELF_NAMENEWLINEfrom mypyc.irbuild.prebuildvisitor import PreBuildVisitorNEWLINEfrom mypyc.ir.ops import (NEWLINE BasicBlock, Integer, Value, Register, Op, Assign, Branch, Unreachable, TupleGet, GetAttr,NEWLINE SetAttr, LoadStatic, InitStatic, NAMESPACE_MODULE, RaiseStandardErrorNEWLINE)NEWLINEfrom mypyc.ir.rtypes import (NEWLINE RType, RTuple, RInstance, c_int_rprimitive, int_rprimitive, dict_rprimitive,NEWLINE none_rprimitive, is_none_rprimitive, object_rprimitive, 
is_object_rprimitive,NEWLINE str_rprimitive, is_tagged, is_list_rprimitive, is_tuple_rprimitive, c_pyssize_t_rprimitiveNEWLINE)NEWLINEfrom mypyc.ir.func_ir import FuncIR, INVALID_FUNC_DEF, RuntimeArg, FuncSignature, FuncDeclNEWLINEfrom mypyc.ir.class_ir import ClassIR, NonExtClassInfoNEWLINEfrom mypyc.primitives.registry import CFunctionDescription, function_opsNEWLINEfrom mypyc.primitives.list_ops import to_list, list_pop_last, list_get_item_unsafe_opNEWLINEfrom mypyc.primitives.dict_ops import dict_get_item_op, dict_set_item_opNEWLINEfrom mypyc.primitives.generic_ops import py_setattr_op, iter_op, next_opNEWLINEfrom mypyc.primitives.misc_ops import (NEWLINE import_op, check_unpack_count_op, get_module_dict_op, import_extra_args_opNEWLINE)NEWLINEfrom mypyc.crash import catch_errorsNEWLINEfrom mypyc.options import CompilerOptionsNEWLINEfrom mypyc.errors import ErrorsNEWLINEfrom mypyc.irbuild.nonlocalcontrol import (NEWLINE NonlocalControl, BaseNonlocalControl, LoopNonlocalControl, GeneratorNonlocalControlNEWLINE)NEWLINEfrom mypyc.irbuild.targets import (NEWLINE AssignmentTarget, AssignmentTargetRegister, AssignmentTargetIndex, AssignmentTargetAttr,NEWLINE AssignmentTargetTupleNEWLINE)NEWLINEfrom mypyc.irbuild.context import FuncInfo, ImplicitClassNEWLINEfrom mypyc.irbuild.mapper import MapperNEWLINEfrom mypyc.irbuild.ll_builder import LowLevelIRBuilderNEWLINEfrom mypyc.irbuild.util import is_constantNEWLINENEWLINENEWLINEclass IRVisitor(ExpressionVisitor[Value], StatementVisitor[None]):NEWLINE passNEWLINENEWLINENEWLINEclass UnsupportedException(Exception):NEWLINE passNEWLINENEWLINENEWLINESymbolTarget = Union[AssignmentTargetRegister, AssignmentTargetAttr]NEWLINENEWLINENEWLINEclass IRBuilder:NEWLINE def __init__(self,NEWLINE current_module: str,NEWLINE types: Dict[Expression, Type],NEWLINE graph: Graph,NEWLINE errors: Errors,NEWLINE mapper: Mapper,NEWLINE pbv: PreBuildVisitor,NEWLINE visitor: IRVisitor,NEWLINE options: CompilerOptions,NEWLINE singledispatch_impls: 
Dict[FuncDef, List[RegisterImplInfo]]) -> None:NEWLINE self.builder = LowLevelIRBuilder(current_module, mapper, options)NEWLINE self.builders = [self.builder]NEWLINE self.symtables: List[OrderedDict[SymbolNode, SymbolTarget]] = [OrderedDict()]NEWLINE self.runtime_args: List[List[RuntimeArg]] = [[]]NEWLINE self.function_name_stack: List[str] = []NEWLINE self.class_ir_stack: List[ClassIR] = []NEWLINENEWLINE self.current_module = current_moduleNEWLINE self.mapper = mapperNEWLINE self.types = typesNEWLINE self.graph = graphNEWLINE self.ret_types: List[RType] = []NEWLINE self.functions: List[FuncIR] = []NEWLINE self.classes: List[ClassIR] = []NEWLINE self.final_names: List[Tuple[str, RType]] = []NEWLINE self.callable_class_names: Set[str] = set()NEWLINE self.options = optionsNEWLINENEWLINE # These variables keep track of the number of lambdas, implicit indices, and implicitNEWLINE # iterators instantiated so we avoid name conflicts. The indices and iterators areNEWLINE # instantiated from for-loops.NEWLINE self.lambda_counter = 0NEWLINE self.temp_counter = 0NEWLINENEWLINE # These variables are populated from the first-pass PreBuildVisitor.NEWLINE self.free_variables = pbv.free_variablesNEWLINE self.prop_setters = pbv.prop_settersNEWLINE self.encapsulating_funcs = pbv.encapsulating_funcsNEWLINE self.nested_fitems = pbv.nested_funcs.keys()NEWLINE self.fdefs_to_decorators = pbv.funcs_to_decoratorsNEWLINE self.singledispatch_impls = singledispatch_implsNEWLINENEWLINE self.visitor = visitorNEWLINENEWLINE # This list operates similarly to a function call stack for nested functions. Whenever aNEWLINE # function definition begins to be generated, a FuncInfo instance is added to the stack,NEWLINE # and information about that function (e.g. whether it is nested, its environment class toNEWLINE # be generated) is stored in that FuncInfo instance. 
When the function is done beingNEWLINE # generated, its corresponding FuncInfo is popped off the stack.NEWLINE self.fn_info = FuncInfo(INVALID_FUNC_DEF, '', '')NEWLINE self.fn_infos: List[FuncInfo] = [self.fn_info]NEWLINENEWLINE # This list operates as a stack of constructs that modify theNEWLINE # behavior of nonlocal control flow constructs.NEWLINE self.nonlocal_control: List[NonlocalControl] = []NEWLINENEWLINE self.errors = errorsNEWLINE # Notionally a list of all of the modules imported by theNEWLINE # module being compiled, but stored as an OrderedDict so weNEWLINE # can also do quick lookups.NEWLINE self.imports: OrderedDict[str, None] = OrderedDict()NEWLINENEWLINE # High-level controlNEWLINENEWLINE def set_module(self, module_name: str, module_path: str) -> None:NEWLINE """Set the name and path of the current module.NEWLINENEWLINE This must be called before transforming any AST nodes.NEWLINE """NEWLINE self.module_name = module_nameNEWLINE self.module_path = module_pathNEWLINENEWLINE @overloadNEWLINE def accept(self, node: Expression) -> Value: ...NEWLINENEWLINE @overloadNEWLINE def accept(self, node: Statement) -> None: ...NEWLINENEWLINE def accept(self, node: Union[Statement, Expression]) -> Optional[Value]:NEWLINE """Transform an expression or a statement."""NEWLINE with self.catch_errors(node.line):NEWLINE if isinstance(node, Expression):NEWLINE try:NEWLINE res = node.accept(self.visitor)NEWLINE res = self.coerce(res, self.node_type(node), node.line)NEWLINE # If we hit an error during compilation, we want toNEWLINE # keep trying, so we can produce more errorNEWLINE # messages. 
Generate a temp of the right type to keepNEWLINE # from causing more downstream trouble.NEWLINE except UnsupportedException:NEWLINE res = Register(self.node_type(node))NEWLINE return resNEWLINE else:NEWLINE try:NEWLINE node.accept(self.visitor)NEWLINE except UnsupportedException:NEWLINE passNEWLINE return NoneNEWLINENEWLINE # Pass through methods for the most common low-level builder ops, for convenience.NEWLINENEWLINE def add(self, op: Op) -> Value:NEWLINE return self.builder.add(op)NEWLINENEWLINE def goto(self, target: BasicBlock) -> None:NEWLINE self.builder.goto(target)NEWLINENEWLINE def activate_block(self, block: BasicBlock) -> None:NEWLINE self.builder.activate_block(block)NEWLINENEWLINE def goto_and_activate(self, block: BasicBlock) -> None:NEWLINE self.builder.goto_and_activate(block)NEWLINENEWLINE def self(self) -> Register:NEWLINE return self.builder.self()NEWLINENEWLINE def py_get_attr(self, obj: Value, attr: str, line: int) -> Value:NEWLINE return self.builder.py_get_attr(obj, attr, line)NEWLINENEWLINE def load_str(self, value: str) -> Value:NEWLINE return self.builder.load_str(value)NEWLINENEWLINE def load_bytes_from_str_literal(self, value: str) -> Value:NEWLINE """Load bytes object from a string literal.NEWLINENEWLINE The literal characters of BytesExpr (the characters inside b'')NEWLINE are stored in BytesExpr.value, whose type is 'str' not 'bytes'.NEWLINE Thus we perform a special conversion here.NEWLINE """NEWLINE bytes_value = bytes(value, 'utf8').decode('unicode-escape').encode('raw-unicode-escape')NEWLINE return self.builder.load_bytes(bytes_value)NEWLINENEWLINE def load_int(self, value: int) -> Value:NEWLINE return self.builder.load_int(value)NEWLINENEWLINE def unary_op(self, lreg: Value, expr_op: str, line: int) -> Value:NEWLINE return self.builder.unary_op(lreg, expr_op, line)NEWLINENEWLINE def binary_op(self, lreg: Value, rreg: Value, expr_op: str, line: int) -> Value:NEWLINE return self.builder.binary_op(lreg, rreg, expr_op, 
line)NEWLINENEWLINE def coerce(self, src: Value, target_type: RType, line: int, force: bool = False) -> Value:NEWLINE return self.builder.coerce(src, target_type, line, force)NEWLINENEWLINE def none_object(self) -> Value:NEWLINE return self.builder.none_object()NEWLINENEWLINE def none(self) -> Value:NEWLINE return self.builder.none()NEWLINENEWLINE def true(self) -> Value:NEWLINE return self.builder.true()NEWLINENEWLINE def false(self) -> Value:NEWLINE return self.builder.false()NEWLINENEWLINE def new_list_op(self, values: List[Value], line: int) -> Value:NEWLINE return self.builder.new_list_op(values, line)NEWLINENEWLINE def new_set_op(self, values: List[Value], line: int) -> Value:NEWLINE return self.builder.new_set_op(values, line)NEWLINENEWLINE def translate_is_op(self,NEWLINE lreg: Value,NEWLINE rreg: Value,NEWLINE expr_op: str,NEWLINE line: int) -> Value:NEWLINE return self.builder.translate_is_op(lreg, rreg, expr_op, line)NEWLINENEWLINE def py_call(self,NEWLINE function: Value,NEWLINE arg_values: List[Value],NEWLINE line: int,NEWLINE arg_kinds: Optional[List[ArgKind]] = None,NEWLINE arg_names: Optional[Sequence[Optional[str]]] = None) -> Value:NEWLINE return self.builder.py_call(function, arg_values, line, arg_kinds, arg_names)NEWLINENEWLINE def add_bool_branch(self, value: Value, true: BasicBlock, false: BasicBlock) -> None:NEWLINE self.builder.add_bool_branch(value, true, false)NEWLINENEWLINE def load_native_type_object(self, fullname: str) -> Value:NEWLINE return self.builder.load_native_type_object(fullname)NEWLINENEWLINE def gen_method_call(self,NEWLINE base: Value,NEWLINE name: str,NEWLINE arg_values: List[Value],NEWLINE result_type: Optional[RType],NEWLINE line: int,NEWLINE arg_kinds: Optional[List[ArgKind]] = None,NEWLINE arg_names: Optional[List[Optional[str]]] = None) -> Value:NEWLINE return self.builder.gen_method_call(NEWLINE base, name, arg_values, result_type, line, arg_kinds, arg_namesNEWLINE )NEWLINENEWLINE def load_module(self, name: str) -> 
Value:NEWLINE return self.builder.load_module(name)NEWLINENEWLINE def call_c(self, desc: CFunctionDescription, args: List[Value], line: int) -> Value:NEWLINE return self.builder.call_c(desc, args, line)NEWLINENEWLINE def int_op(self, type: RType, lhs: Value, rhs: Value, op: int, line: int) -> Value:NEWLINE return self.builder.int_op(type, lhs, rhs, op, line)NEWLINENEWLINE def compare_tagged(self, lhs: Value, rhs: Value, op: str, line: int) -> Value:NEWLINE return self.builder.compare_tagged(lhs, rhs, op, line)NEWLINENEWLINE def compare_tuples(self, lhs: Value, rhs: Value, op: str, line: int) -> Value:NEWLINE return self.builder.compare_tuples(lhs, rhs, op, line)NEWLINENEWLINE def builtin_len(self, val: Value, line: int) -> Value:NEWLINE return self.builder.builtin_len(val, line)NEWLINENEWLINE def new_tuple(self, items: List[Value], line: int) -> Value:NEWLINE return self.builder.new_tuple(items, line)NEWLINENEWLINE # Helpers for IR buildingNEWLINENEWLINE def add_to_non_ext_dict(self, non_ext: NonExtClassInfo,NEWLINE key: str, val: Value, line: int) -> None:NEWLINE # Add an attribute entry into the class dict of a non-extension class.NEWLINE key_unicode = self.load_str(key)NEWLINE self.call_c(dict_set_item_op, [non_ext.dict, key_unicode, val], line)NEWLINENEWLINE def gen_import_from(self, id: str, globals_dict: Value,NEWLINE imported: List[str], line: int) -> Value:NEWLINE self.imports[id] = NoneNEWLINENEWLINE null_dict = Integer(0, dict_rprimitive, line)NEWLINE names_to_import = self.new_list_op([self.load_str(name) for name in imported], line)NEWLINE zero_int = Integer(0, c_int_rprimitive, line)NEWLINE value = self.call_c(NEWLINE import_extra_args_op,NEWLINE [self.load_str(id), globals_dict, null_dict, names_to_import, zero_int],NEWLINE line,NEWLINE )NEWLINE self.add(InitStatic(value, id, namespace=NAMESPACE_MODULE))NEWLINE return valueNEWLINENEWLINE def gen_import(self, id: str, line: int) -> None:NEWLINE self.imports[id] = NoneNEWLINENEWLINE needs_import, out = 
BasicBlock(), BasicBlock()NEWLINE self.check_if_module_loaded(id, line, needs_import, out)NEWLINENEWLINE self.activate_block(needs_import)NEWLINE value = self.call_c(import_op, [self.load_str(id)], line)NEWLINE self.add(InitStatic(value, id, namespace=NAMESPACE_MODULE))NEWLINE self.goto_and_activate(out)NEWLINENEWLINE def check_if_module_loaded(self, id: str, line: int,NEWLINE needs_import: BasicBlock, out: BasicBlock) -> None:NEWLINE """Generate code that checks if the module `id` has been loaded yet.NEWLINENEWLINE Arguments:NEWLINE id: name of module to check if importedNEWLINE line: line number that the import occurs onNEWLINE needs_import: the BasicBlock that is run if the module has not been loaded yetNEWLINE out: the BasicBlock that is run if the module has already been loaded"""NEWLINE first_load = self.load_module(id)NEWLINE comparison = self.translate_is_op(first_load, self.none_object(), 'is not', line)NEWLINE self.add_bool_branch(comparison, out, needs_import)NEWLINENEWLINE def get_module(self, module: str, line: int) -> Value:NEWLINE # Python 3.7 has a nice 'PyImport_GetModule' function that we can't use :(NEWLINE mod_dict = self.call_c(get_module_dict_op, [], line)NEWLINE # Get module object from modules dict.NEWLINE return self.call_c(dict_get_item_op,NEWLINE [mod_dict, self.load_str(module)], line)NEWLINENEWLINE def get_module_attr(self, module: str, attr: str, line: int) -> Value:NEWLINE """Look up an attribute of a module without storing it in the local namespace.NEWLINENEWLINE For example, get_module_attr('typing', 'TypedDict', line) results inNEWLINE the value of 'typing.TypedDict'.NEWLINENEWLINE Import the module if needed.NEWLINE """NEWLINE self.gen_import(module, line)NEWLINE module_obj = self.get_module(module, line)NEWLINE return self.py_get_attr(module_obj, attr, line)NEWLINENEWLINE def assign_if_null(self, target: Register,NEWLINE get_val: Callable[[], Value], line: int) -> None:NEWLINE """If target is NULL, assign value produced by 
get_val to it."""NEWLINE error_block, body_block = BasicBlock(), BasicBlock()NEWLINE self.add(Branch(target, error_block, body_block, Branch.IS_ERROR))NEWLINE self.activate_block(error_block)NEWLINE self.add(Assign(target, self.coerce(get_val(), target.type, line)))NEWLINE self.goto(body_block)NEWLINE self.activate_block(body_block)NEWLINENEWLINE def maybe_add_implicit_return(self) -> None:NEWLINE if is_none_rprimitive(self.ret_types[-1]) or is_object_rprimitive(self.ret_types[-1]):NEWLINE self.add_implicit_return()NEWLINE else:NEWLINE self.add_implicit_unreachable()NEWLINENEWLINE def add_implicit_return(self) -> None:NEWLINE block = self.builder.blocks[-1]NEWLINE if not block.terminated:NEWLINE retval = self.coerce(self.builder.none(), self.ret_types[-1], -1)NEWLINE self.nonlocal_control[-1].gen_return(self, retval, self.fn_info.fitem.line)NEWLINENEWLINE def add_implicit_unreachable(self) -> None:NEWLINE block = self.builder.blocks[-1]NEWLINE if not block.terminated:NEWLINE self.add(Unreachable())NEWLINENEWLINE def disallow_class_assignments(self, lvalues: List[Lvalue], line: int) -> None:NEWLINE # Some best-effort attempts to disallow assigning to classNEWLINE # variables that aren't marked ClassVar, since we blatantlyNEWLINE # miscompile the interaction between instance and classNEWLINE # variables.NEWLINE for lvalue in lvalues:NEWLINE if (isinstance(lvalue, MemberExpr)NEWLINE and isinstance(lvalue.expr, RefExpr)NEWLINE and isinstance(lvalue.expr.node, TypeInfo)):NEWLINE var = lvalue.expr.node[lvalue.name].nodeNEWLINE if isinstance(var, Var) and not var.is_classvar:NEWLINE self.error(NEWLINE "Only class variables defined as ClassVar can be assigned to",NEWLINE line)NEWLINENEWLINE def non_function_scope(self) -> bool:NEWLINE # Currently the stack always has at least two items: dummy and top-level.NEWLINE return len(self.fn_infos) <= 2NEWLINENEWLINE def init_final_static(self,NEWLINE lvalue: Lvalue,NEWLINE rvalue_reg: Value,NEWLINE class_name: Optional[str] = 
None,NEWLINE *,NEWLINE type_override: Optional[RType] = None) -> None:NEWLINE assert isinstance(lvalue, NameExpr)NEWLINE assert isinstance(lvalue.node, Var)NEWLINE if lvalue.node.final_value is None:NEWLINE if class_name is None:NEWLINE name = lvalue.nameNEWLINE else:NEWLINE name = '{}.{}'.format(class_name, lvalue.name)NEWLINE assert name is not None, "Full name not set for variable"NEWLINE coerced = self.coerce(rvalue_reg, type_override or self.node_type(lvalue), lvalue.line)NEWLINE self.final_names.append((name, coerced.type))NEWLINE self.add(InitStatic(coerced, name, self.module_name))NEWLINENEWLINE def load_final_static(self, fullname: str, typ: RType, line: int,NEWLINE error_name: Optional[str] = None) -> Value:NEWLINE split_name = split_target(self.graph, fullname)NEWLINE assert split_name is not NoneNEWLINE module, name = split_nameNEWLINE return self.builder.load_static_checked(NEWLINE typ, name, module, line=line,NEWLINE error_msg='value for final name "{}" was not set'.format(error_name))NEWLINENEWLINE def load_final_literal_value(self, val: Union[int, str, bytes, float, bool],NEWLINE line: int) -> Value:NEWLINE """Load value of a final name or class-level attribute."""NEWLINE if isinstance(val, bool):NEWLINE if val:NEWLINE return self.true()NEWLINE else:NEWLINE return self.false()NEWLINE elif isinstance(val, int):NEWLINE # TODO: take care of negative integer initializersNEWLINE # (probably easier to fix this in mypy itself).NEWLINE return self.builder.load_int(val)NEWLINE elif isinstance(val, float):NEWLINE return self.builder.load_float(val)NEWLINE elif isinstance(val, str):NEWLINE return self.builder.load_str(val)NEWLINE elif isinstance(val, bytes):NEWLINE return self.builder.load_bytes(val)NEWLINE else:NEWLINE assert False, "Unsupported final literal value"NEWLINENEWLINE def get_assignment_target(self, lvalue: Lvalue,NEWLINE line: int = -1) -> AssignmentTarget:NEWLINE if isinstance(lvalue, NameExpr):NEWLINE # If we are visiting a decorator, then the 
SymbolNode we really want to be looking atNEWLINE # is the function that is decorated, not the entire Decorator node itself.NEWLINE symbol = lvalue.nodeNEWLINE if isinstance(symbol, Decorator):NEWLINE symbol = symbol.funcNEWLINE if symbol is None:NEWLINE # New semantic analyzer doesn't create ad-hoc Vars for special forms.NEWLINE assert lvalue.is_special_formNEWLINE symbol = Var(lvalue.name)NEWLINE if lvalue.kind == LDEF:NEWLINE if symbol not in self.symtables[-1]:NEWLINE # If the function is a generator function, then first define a new variableNEWLINE # in the current function's environment class. Next, define a target thatNEWLINE # refers to the newly defined variable in that environment class. Add theNEWLINE # target to the table containing class environment variables, as well as theNEWLINE # current environment.NEWLINE if self.fn_info.is_generator:NEWLINE return self.add_var_to_env_class(symbol, self.node_type(lvalue),NEWLINE self.fn_info.generator_class,NEWLINE reassign=False)NEWLINENEWLINE # Otherwise define a new local variable.NEWLINE return self.add_local_reg(symbol, self.node_type(lvalue))NEWLINE else:NEWLINE # Assign to a previously defined variable.NEWLINE return self.lookup(symbol)NEWLINE elif lvalue.kind == GDEF:NEWLINE globals_dict = self.load_globals_dict()NEWLINE name = self.load_str(lvalue.name)NEWLINE return AssignmentTargetIndex(globals_dict, name)NEWLINE else:NEWLINE assert False, lvalue.kindNEWLINE elif isinstance(lvalue, IndexExpr):NEWLINE # Indexed assignment x[y] = eNEWLINE base = self.accept(lvalue.base)NEWLINE index = self.accept(lvalue.index)NEWLINE return AssignmentTargetIndex(base, index)NEWLINE elif isinstance(lvalue, MemberExpr):NEWLINE # Attribute assignment x.y = eNEWLINE obj = self.accept(lvalue.expr)NEWLINE return AssignmentTargetAttr(obj, lvalue.name)NEWLINE elif isinstance(lvalue, TupleExpr):NEWLINE # Multiple assignment a, ..., b = eNEWLINE star_idx: Optional[int] = NoneNEWLINE lvalues = []NEWLINE for idx, item in 
enumerate(lvalue.items):NEWLINE targ = self.get_assignment_target(item)NEWLINE lvalues.append(targ)NEWLINE if isinstance(item, StarExpr):NEWLINE if star_idx is not None:NEWLINE self.error("Two starred expressions in assignment", line)NEWLINE star_idx = idxNEWLINENEWLINE return AssignmentTargetTuple(lvalues, star_idx)NEWLINENEWLINE elif isinstance(lvalue, StarExpr):NEWLINE return self.get_assignment_target(lvalue.expr)NEWLINENEWLINE assert False, 'Unsupported lvalue: %r' % lvalueNEWLINENEWLINE def read(self, target: Union[Value, AssignmentTarget], line: int = -1) -> Value:NEWLINE if isinstance(target, Value):NEWLINE return targetNEWLINE if isinstance(target, AssignmentTargetRegister):NEWLINE return target.registerNEWLINE if isinstance(target, AssignmentTargetIndex):NEWLINE reg = self.gen_method_call(NEWLINE target.base, '__getitem__', [target.index], target.type, line)NEWLINE if reg is not None:NEWLINE return regNEWLINE assert False, target.base.typeNEWLINE if isinstance(target, AssignmentTargetAttr):NEWLINE if isinstance(target.obj.type, RInstance) and target.obj.type.class_ir.is_ext_class:NEWLINE return self.add(GetAttr(target.obj, target.attr, line))NEWLINE else:NEWLINE return self.py_get_attr(target.obj, target.attr, line)NEWLINENEWLINE assert False, 'Unsupported lvalue: %r' % targetNEWLINENEWLINE def assign(self,NEWLINE target: Union[Register, AssignmentTarget],NEWLINE rvalue_reg: Value,NEWLINE line: int) -> None:NEWLINE if isinstance(target, Register):NEWLINE self.add(Assign(target, rvalue_reg))NEWLINE elif isinstance(target, AssignmentTargetRegister):NEWLINE rvalue_reg = self.coerce(rvalue_reg, target.type, line)NEWLINE self.add(Assign(target.register, rvalue_reg))NEWLINE elif isinstance(target, AssignmentTargetAttr):NEWLINE if isinstance(target.obj_type, RInstance):NEWLINE rvalue_reg = self.coerce(rvalue_reg, target.type, line)NEWLINE self.add(SetAttr(target.obj, target.attr, rvalue_reg, line))NEWLINE else:NEWLINE key = self.load_str(target.attr)NEWLINE 
boxed_reg = self.builder.box(rvalue_reg)NEWLINE self.call_c(py_setattr_op, [target.obj, key, boxed_reg], line)NEWLINE elif isinstance(target, AssignmentTargetIndex):NEWLINE target_reg2 = self.gen_method_call(NEWLINE target.base, '__setitem__', [target.index, rvalue_reg], None, line)NEWLINE assert target_reg2 is not None, target.base.typeNEWLINE elif isinstance(target, AssignmentTargetTuple):NEWLINE if isinstance(rvalue_reg.type, RTuple) and target.star_idx is None:NEWLINE rtypes = rvalue_reg.type.typesNEWLINE assert len(rtypes) == len(target.items)NEWLINE for i in range(len(rtypes)):NEWLINE item_value = self.add(TupleGet(rvalue_reg, i, line))NEWLINE self.assign(target.items[i], item_value, line)NEWLINE elif ((is_list_rprimitive(rvalue_reg.type) or is_tuple_rprimitive(rvalue_reg.type))NEWLINE and target.star_idx is None):NEWLINE self.process_sequence_assignment(target, rvalue_reg, line)NEWLINE else:NEWLINE self.process_iterator_tuple_assignment(target, rvalue_reg, line)NEWLINE else:NEWLINE assert False, 'Unsupported assignment target'NEWLINENEWLINE def process_sequence_assignment(self,NEWLINE target: AssignmentTargetTuple,NEWLINE rvalue: Value,NEWLINE line: int) -> None:NEWLINE """Process assignment like 'x, y = s', where s is a variable-length list or tuple."""NEWLINE # Check the length of sequence.NEWLINE expected_len = Integer(len(target.items), c_pyssize_t_rprimitive)NEWLINE self.builder.call_c(check_unpack_count_op, [rvalue, expected_len], line)NEWLINENEWLINE # Read sequence items.NEWLINE values = []NEWLINE for i in range(len(target.items)):NEWLINE item = target.items[i]NEWLINE index = self.builder.load_int(i)NEWLINE if is_list_rprimitive(rvalue.type):NEWLINE item_value = self.call_c(list_get_item_unsafe_op, [rvalue, index], line)NEWLINE else:NEWLINE item_value = self.builder.gen_method_call(NEWLINE rvalue, '__getitem__', [index], item.type, line)NEWLINE values.append(item_value)NEWLINENEWLINE # Assign sequence items to the target lvalues.NEWLINE for lvalue, 
value in zip(target.items, values):NEWLINE self.assign(lvalue, value, line)NEWLINENEWLINE def process_iterator_tuple_assignment_helper(self,NEWLINE litem: AssignmentTarget,NEWLINE ritem: Value, line: int) -> None:NEWLINE error_block, ok_block = BasicBlock(), BasicBlock()NEWLINE self.add(Branch(ritem, error_block, ok_block, Branch.IS_ERROR))NEWLINENEWLINE self.activate_block(error_block)NEWLINE self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,NEWLINE 'not enough values to unpack', line))NEWLINE self.add(Unreachable())NEWLINENEWLINE self.activate_block(ok_block)NEWLINE self.assign(litem, ritem, line)NEWLINENEWLINE def process_iterator_tuple_assignment(self,NEWLINE target: AssignmentTargetTuple,NEWLINE rvalue_reg: Value,NEWLINE line: int) -> None:NEWLINENEWLINE iterator = self.call_c(iter_op, [rvalue_reg], line)NEWLINENEWLINE # This may be the whole lvalue list if there is no starred valueNEWLINE split_idx = target.star_idx if target.star_idx is not None else len(target.items)NEWLINENEWLINE # Assign values before the first starred valueNEWLINE for litem in target.items[:split_idx]:NEWLINE ritem = self.call_c(next_op, [iterator], line)NEWLINE error_block, ok_block = BasicBlock(), BasicBlock()NEWLINE self.add(Branch(ritem, error_block, ok_block, Branch.IS_ERROR))NEWLINENEWLINE self.activate_block(error_block)NEWLINE self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,NEWLINE 'not enough values to unpack', line))NEWLINE self.add(Unreachable())NEWLINENEWLINE self.activate_block(ok_block)NEWLINENEWLINE self.assign(litem, ritem, line)NEWLINENEWLINE # Assign the starred value and all values after itNEWLINE if target.star_idx is not None:NEWLINE post_star_vals = target.items[split_idx + 1:]NEWLINE iter_list = self.call_c(to_list, [iterator], line)NEWLINE iter_list_len = self.builtin_len(iter_list, line)NEWLINE post_star_len = Integer(len(post_star_vals))NEWLINE condition = self.binary_op(post_star_len, iter_list_len, '<=', line)NEWLINENEWLINE error_block, 
ok_block = BasicBlock(), BasicBlock()NEWLINE self.add(Branch(condition, ok_block, error_block, Branch.BOOL))NEWLINENEWLINE self.activate_block(error_block)NEWLINE self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,NEWLINE 'not enough values to unpack', line))NEWLINE self.add(Unreachable())NEWLINENEWLINE self.activate_block(ok_block)NEWLINENEWLINE for litem in reversed(post_star_vals):NEWLINE ritem = self.call_c(list_pop_last, [iter_list], line)NEWLINE self.assign(litem, ritem, line)NEWLINENEWLINE # Assign the starred valueNEWLINE self.assign(target.items[target.star_idx], iter_list, line)NEWLINENEWLINE # There is no starred value, so check if there are extra values in rhs thatNEWLINE # have not been assigned.NEWLINE else:NEWLINE extra = self.call_c(next_op, [iterator], line)NEWLINE error_block, ok_block = BasicBlock(), BasicBlock()NEWLINE self.add(Branch(extra, ok_block, error_block, Branch.IS_ERROR))NEWLINENEWLINE self.activate_block(error_block)NEWLINE self.add(RaiseStandardError(RaiseStandardError.VALUE_ERROR,NEWLINE 'too many values to unpack', line))NEWLINE self.add(Unreachable())NEWLINENEWLINE self.activate_block(ok_block)NEWLINENEWLINE def push_loop_stack(self, continue_block: BasicBlock, break_block: BasicBlock) -> None:NEWLINE self.nonlocal_control.append(NEWLINE LoopNonlocalControl(self.nonlocal_control[-1], continue_block, break_block))NEWLINENEWLINE def pop_loop_stack(self) -> None:NEWLINE self.nonlocal_control.pop()NEWLINENEWLINE def spill(self, value: Value) -> AssignmentTarget:NEWLINE """Moves a given Value instance into the generator class' environment class."""NEWLINE name = '{}{}'.format(TEMP_ATTR_NAME, self.temp_counter)NEWLINE self.temp_counter += 1NEWLINE target = self.add_var_to_env_class(Var(name), value.type, self.fn_info.generator_class)NEWLINE # Shouldn't be able to fail, so -1 for lineNEWLINE self.assign(target, value, -1)NEWLINE return targetNEWLINENEWLINE def maybe_spill(self, value: Value) -> Union[Value, 
AssignmentTarget]:NEWLINE """NEWLINE Moves a given Value instance into the environment class for generator functions. ForNEWLINE non-generator functions, leaves the Value instance as it is.NEWLINENEWLINE Returns an AssignmentTarget associated with the Value for generator functions and theNEWLINE original Value itself for non-generator functions.NEWLINE """NEWLINE if self.fn_info.is_generator:NEWLINE return self.spill(value)NEWLINE return valueNEWLINENEWLINE def maybe_spill_assignable(self, value: Value) -> Union[Register, AssignmentTarget]:NEWLINE """NEWLINE Moves a given Value instance into the environment class for generator functions. ForNEWLINE non-generator functions, allocate a temporary Register.NEWLINENEWLINE Returns an AssignmentTarget associated with the Value for generator functions and anNEWLINE assignable Register for non-generator functions.NEWLINE """NEWLINE if self.fn_info.is_generator:NEWLINE return self.spill(value)NEWLINENEWLINE if isinstance(value, Register):NEWLINE return valueNEWLINENEWLINE # Allocate a temporary register for the assignable value.NEWLINE reg = Register(value.type)NEWLINE self.assign(reg, value, -1)NEWLINE return regNEWLINENEWLINE def extract_int(self, e: Expression) -> Optional[int]:NEWLINE if isinstance(e, IntExpr):NEWLINE return e.valueNEWLINE elif isinstance(e, UnaryExpr) and e.op == '-' and isinstance(e.expr, IntExpr):NEWLINE return -e.expr.valueNEWLINE else:NEWLINE return NoneNEWLINENEWLINE def get_sequence_type(self, expr: Expression) -> RType:NEWLINE target_type = get_proper_type(self.types[expr])NEWLINE assert isinstance(target_type, Instance)NEWLINE if target_type.type.fullname == 'builtins.str':NEWLINE return str_rprimitiveNEWLINE else:NEWLINE return self.type_to_rtype(target_type.args[0])NEWLINENEWLINE def get_dict_base_type(self, expr: Expression) -> Instance:NEWLINE """Find dict type of a dict-like expression.NEWLINENEWLINE This is useful for dict subclasses like SymbolTable.NEWLINE """NEWLINE target_type = 
get_proper_type(self.types[expr])NEWLINE assert isinstance(target_type, Instance)NEWLINE dict_base = next(base for base in target_type.type.mroNEWLINE if base.fullname == 'builtins.dict')NEWLINE return map_instance_to_supertype(target_type, dict_base)NEWLINENEWLINE def get_dict_key_type(self, expr: Expression) -> RType:NEWLINE dict_base_type = self.get_dict_base_type(expr)NEWLINE return self.type_to_rtype(dict_base_type.args[0])NEWLINENEWLINE def get_dict_value_type(self, expr: Expression) -> RType:NEWLINE dict_base_type = self.get_dict_base_type(expr)NEWLINE return self.type_to_rtype(dict_base_type.args[1])NEWLINENEWLINE def get_dict_item_type(self, expr: Expression) -> RType:NEWLINE key_type = self.get_dict_key_type(expr)NEWLINE value_type = self.get_dict_value_type(expr)NEWLINE return RTuple([key_type, value_type])NEWLINENEWLINE def _analyze_iterable_item_type(self, expr: Expression) -> Type:NEWLINE """Return the item type given by 'expr' in an iterable context."""NEWLINE # This logic is copied from mypy's TypeChecker.analyze_iterable_item_type.NEWLINE iterable = get_proper_type(self.types[expr])NEWLINE echk = self.graph[self.module_name].type_checker().expr_checkerNEWLINE iterator = echk.check_method_call_by_name('__iter__', iterable, [], [], expr)[0]NEWLINENEWLINE from mypy.join import join_typesNEWLINE if isinstance(iterable, TupleType):NEWLINE joined: Type = UninhabitedType()NEWLINE for item in iterable.items:NEWLINE joined = join_types(joined, item)NEWLINE return joinedNEWLINE else:NEWLINE # Non-tuple iterable.NEWLINE return echk.check_method_call_by_name('__next__', iterator, [], [], expr)[0]NEWLINENEWLINE def is_native_module(self, module: str) -> bool:NEWLINE """Is the given module one compiled by mypyc?"""NEWLINE return module in self.mapper.group_mapNEWLINENEWLINE def is_native_ref_expr(self, expr: RefExpr) -> bool:NEWLINE if expr.node is None:NEWLINE return FalseNEWLINE if '.' 
in expr.node.fullname:NEWLINE return self.is_native_module(expr.node.fullname.rpartition('.')[0])NEWLINE return TrueNEWLINENEWLINE def is_native_module_ref_expr(self, expr: RefExpr) -> bool:NEWLINE return self.is_native_ref_expr(expr) and expr.kind == GDEFNEWLINENEWLINE def is_synthetic_type(self, typ: TypeInfo) -> bool:NEWLINE """Is a type something other than just a class we've created?"""NEWLINE return typ.is_named_tuple or typ.is_newtype or typ.typeddict_type is not NoneNEWLINENEWLINE def get_final_ref(self, expr: MemberExpr) -> Optional[Tuple[str, Var, bool]]:NEWLINE """Check if `expr` is a final attribute.NEWLINENEWLINE This needs to be done differently for class and module attributes toNEWLINE correctly determine fully qualified name. Return a tuple that consists ofNEWLINE the qualified name, the corresponding Var node, and a flag indicating whetherNEWLINE the final name was defined in a compiled module. Return None if `expr` does notNEWLINE refer to a final attribute.NEWLINE """NEWLINE final_var = NoneNEWLINE if isinstance(expr.expr, RefExpr) and isinstance(expr.expr.node, TypeInfo):NEWLINE # a class attributeNEWLINE sym = expr.expr.node.get(expr.name)NEWLINE if sym and isinstance(sym.node, Var):NEWLINE # Enum attribute are treated as final since they are added to the global cacheNEWLINE expr_fullname = expr.expr.node.bases[0].type.fullnameNEWLINE is_final = sym.node.is_final or expr_fullname == 'enum.Enum'NEWLINE if is_final:NEWLINE final_var = sym.nodeNEWLINE fullname = '{}.{}'.format(sym.node.info.fullname, final_var.name)NEWLINE native = self.is_native_module(expr.expr.node.module_name)NEWLINE elif self.is_module_member_expr(expr):NEWLINE # a module attributeNEWLINE if isinstance(expr.node, Var) and expr.node.is_final:NEWLINE final_var = expr.nodeNEWLINE fullname = expr.node.fullnameNEWLINE native = self.is_native_ref_expr(expr)NEWLINE if final_var is not None:NEWLINE return fullname, final_var, nativeNEWLINE return NoneNEWLINENEWLINE def 
emit_load_final(self, final_var: Var, fullname: str,NEWLINE name: str, native: bool, typ: Type, line: int) -> Optional[Value]:NEWLINE """Emit code for loading value of a final name (if possible).NEWLINENEWLINE Args:NEWLINE final_var: Var corresponding to the final nameNEWLINE fullname: its qualified nameNEWLINE name: shorter name to show in errorsNEWLINE native: whether the name was defined in a compiled moduleNEWLINE typ: its typeNEWLINE line: line number where loading occursNEWLINE """NEWLINE if final_var.final_value is not None: # this is safe even for non-native namesNEWLINE return self.load_final_literal_value(final_var.final_value, line)NEWLINE elif native:NEWLINE return self.load_final_static(fullname, self.mapper.type_to_rtype(typ),NEWLINE line, name)NEWLINE else:NEWLINE return NoneNEWLINENEWLINE def is_module_member_expr(self, expr: MemberExpr) -> bool:NEWLINE return isinstance(expr.expr, RefExpr) and isinstance(expr.expr.node, MypyFile)NEWLINENEWLINE def call_refexpr_with_args(NEWLINE self, expr: CallExpr, callee: RefExpr, arg_values: List[Value]) -> Value:NEWLINENEWLINE # Handle data-driven special-cased primitive call ops.NEWLINE if callee.fullname is not None and expr.arg_kinds == [ARG_POS] * len(arg_values):NEWLINE call_c_ops_candidates = function_ops.get(callee.fullname, [])NEWLINE target = self.builder.matching_call_c(call_c_ops_candidates, arg_values,NEWLINE expr.line, self.node_type(expr))NEWLINE if target:NEWLINE return targetNEWLINENEWLINE # Standard native call if signature and fullname are good and all arguments are positionalNEWLINE # or named.NEWLINE callee_node = callee.nodeNEWLINE if isinstance(callee_node, OverloadedFuncDef):NEWLINE callee_node = callee_node.implNEWLINE # TODO: use native calls for any decorated functions which have all their decoratorsNEWLINE # removed, not just singledispatch functions (which we don't do now just in case thoseNEWLINE # decorated functions are callable classes or cannot be called without the python API 
forNEWLINE # some other reason)NEWLINE if (NEWLINE isinstance(callee_node, Decorator)NEWLINE and callee_node.func not in self.fdefs_to_decoratorsNEWLINE and callee_node.func in self.singledispatch_implsNEWLINE ):NEWLINE callee_node = callee_node.funcNEWLINE if (callee_node is not NoneNEWLINE and callee.fullname is not NoneNEWLINE and callee_node in self.mapper.func_to_declNEWLINE and all(kind in (ARG_POS, ARG_NAMED) for kind in expr.arg_kinds)):NEWLINE decl = self.mapper.func_to_decl[callee_node]NEWLINE return self.builder.call(decl, arg_values, expr.arg_kinds, expr.arg_names, expr.line)NEWLINENEWLINE # Fall back to a Python callNEWLINE function = self.accept(callee)NEWLINE return self.py_call(function, arg_values, expr.line,NEWLINE arg_kinds=expr.arg_kinds, arg_names=expr.arg_names)NEWLINENEWLINE def shortcircuit_expr(self, expr: OpExpr) -> Value:NEWLINE return self.builder.shortcircuit_helper(NEWLINE expr.op, self.node_type(expr),NEWLINE lambda: self.accept(expr.left),NEWLINE lambda: self.accept(expr.right),NEWLINE expr.lineNEWLINE )NEWLINENEWLINE # Conditional expressionsNEWLINENEWLINE def process_conditional(self, e: Expression, true: BasicBlock, false: BasicBlock) -> None:NEWLINE if isinstance(e, OpExpr) and e.op in ['and', 'or']:NEWLINE if e.op == 'and':NEWLINE # Short circuit 'and' in a conditional context.NEWLINE new = BasicBlock()NEWLINE self.process_conditional(e.left, new, false)NEWLINE self.activate_block(new)NEWLINE self.process_conditional(e.right, true, false)NEWLINE else:NEWLINE # Short circuit 'or' in a conditional context.NEWLINE new = BasicBlock()NEWLINE self.process_conditional(e.left, true, new)NEWLINE self.activate_block(new)NEWLINE self.process_conditional(e.right, true, false)NEWLINE elif isinstance(e, UnaryExpr) and e.op == 'not':NEWLINE self.process_conditional(e.expr, false, true)NEWLINE else:NEWLINE res = self.maybe_process_conditional_comparison(e, true, false)NEWLINE if res:NEWLINE returnNEWLINE # Catch-all for arbitrary 
expressions.NEWLINE reg = self.accept(e)NEWLINE self.add_bool_branch(reg, true, false)NEWLINENEWLINE def maybe_process_conditional_comparison(self,NEWLINE e: Expression,NEWLINE true: BasicBlock,NEWLINE false: BasicBlock) -> bool:NEWLINE """Transform simple tagged integer comparisons in a conditional context.NEWLINENEWLINE Return True if the operation is supported (and was transformed). Otherwise,NEWLINE do nothing and return False.NEWLINENEWLINE Args:NEWLINE e: Arbitrary expressionNEWLINE true: Branch target if comparison is trueNEWLINE false: Branch target if comparison is falseNEWLINE """NEWLINE if not isinstance(e, ComparisonExpr) or len(e.operands) != 2:NEWLINE return FalseNEWLINE ltype = self.node_type(e.operands[0])NEWLINE rtype = self.node_type(e.operands[1])NEWLINE if not is_tagged(ltype) or not is_tagged(rtype):NEWLINE return FalseNEWLINE op = e.operators[0]NEWLINE if op not in ('==', '!=', '<', '<=', '>', '>='):NEWLINE return FalseNEWLINE left = self.accept(e.operands[0])NEWLINE right = self.accept(e.operands[1])NEWLINE # "left op right" for two tagged integersNEWLINE self.builder.compare_tagged_condition(left, right, op, true, false, e.line)NEWLINE return TrueNEWLINENEWLINE # Basic helpersNEWLINENEWLINE def flatten_classes(self, arg: Union[RefExpr, TupleExpr]) -> Optional[List[ClassIR]]:NEWLINE """Flatten classes in isinstance(obj, (A, (B, C))).NEWLINENEWLINE If at least one item is not a reference to a native class, return None.NEWLINE """NEWLINE if isinstance(arg, RefExpr):NEWLINE if isinstance(arg.node, TypeInfo) and self.is_native_module_ref_expr(arg):NEWLINE ir = self.mapper.type_to_ir.get(arg.node)NEWLINE if ir:NEWLINE return [ir]NEWLINE return NoneNEWLINE else:NEWLINE res: List[ClassIR] = []NEWLINE for item in arg.items:NEWLINE if isinstance(item, (RefExpr, TupleExpr)):NEWLINE item_part = self.flatten_classes(item)NEWLINE if item_part is None:NEWLINE return NoneNEWLINE res.extend(item_part)NEWLINE else:NEWLINE return NoneNEWLINE return 
resNEWLINENEWLINE def enter(self, fn_info: Union[FuncInfo, str] = '') -> None:NEWLINE if isinstance(fn_info, str):NEWLINE fn_info = FuncInfo(name=fn_info)NEWLINE self.builder = LowLevelIRBuilder(self.current_module, self.mapper, self.options)NEWLINE self.builders.append(self.builder)NEWLINE self.symtables.append(OrderedDict())NEWLINE self.runtime_args.append([])NEWLINE self.fn_info = fn_infoNEWLINE self.fn_infos.append(self.fn_info)NEWLINE self.ret_types.append(none_rprimitive)NEWLINE if fn_info.is_generator:NEWLINE self.nonlocal_control.append(GeneratorNonlocalControl())NEWLINE else:NEWLINE self.nonlocal_control.append(BaseNonlocalControl())NEWLINE self.activate_block(BasicBlock())NEWLINENEWLINE def leave(self) -> Tuple[List[Register], List[RuntimeArg], List[BasicBlock], RType, FuncInfo]:NEWLINE builder = self.builders.pop()NEWLINE self.symtables.pop()NEWLINE runtime_args = self.runtime_args.pop()NEWLINE ret_type = self.ret_types.pop()NEWLINE fn_info = self.fn_infos.pop()NEWLINE self.nonlocal_control.pop()NEWLINE self.builder = self.builders[-1]NEWLINE self.fn_info = self.fn_infos[-1]NEWLINE return builder.args, runtime_args, builder.blocks, ret_type, fn_infoNEWLINENEWLINE def enter_method(self,NEWLINE class_ir: ClassIR,NEWLINE name: str,NEWLINE ret_type: RType,NEWLINE fn_info: Union[FuncInfo, str] = '',NEWLINE self_type: Optional[RType] = None) -> None:NEWLINE """Begin generating IR for a method.NEWLINENEWLINE If the method takes arguments, you should immediately afterwards callNEWLINE add_argument() for each non-self argument (self is created implicitly).NEWLINENEWLINE Call leave_method() to finish the generation of the method.NEWLINENEWLINE You can enter multiple methods at a time. 
They are maintained in aNEWLINE stack, and leave_method() leaves the topmost one.NEWLINENEWLINE Args:NEWLINE class_ir: Add method to this classNEWLINE name: Short name of the methodNEWLINE ret_type: Return type of the methodNEWLINE fn_info: Optionally, additional information about the methodNEWLINE self_type: If not None, override default type of the implicit 'self'NEWLINE argument (by default, derive type from class_ir)NEWLINE """NEWLINE self.enter(fn_info)NEWLINE self.function_name_stack.append(name)NEWLINE self.class_ir_stack.append(class_ir)NEWLINE self.ret_types[-1] = ret_typeNEWLINE if self_type is None:NEWLINE self_type = RInstance(class_ir)NEWLINE self.add_argument(SELF_NAME, self_type)NEWLINENEWLINE def add_argument(self, var: Union[str, Var], typ: RType, kind: ArgKind = ARG_POS) -> Register:NEWLINE """Declare an argument in the current function.NEWLINENEWLINE You should use this instead of directly calling add_local() in new code.NEWLINE """NEWLINE if isinstance(var, str):NEWLINE var = Var(var)NEWLINE reg = self.add_local(var, typ, is_arg=True)NEWLINE self.runtime_args[-1].append(RuntimeArg(var.name, typ, kind))NEWLINE return regNEWLINENEWLINE def leave_method(self) -> None:NEWLINE """Finish the generation of IR for a method."""NEWLINE arg_regs, args, blocks, ret_type, fn_info = self.leave()NEWLINE sig = FuncSignature(args, ret_type)NEWLINE name = self.function_name_stack.pop()NEWLINE class_ir = self.class_ir_stack.pop()NEWLINE decl = FuncDecl(name, class_ir.name, self.module_name, sig)NEWLINE ir = FuncIR(decl, arg_regs, blocks)NEWLINE class_ir.methods[name] = irNEWLINE class_ir.method_decls[name] = ir.declNEWLINE self.functions.append(ir)NEWLINENEWLINE def lookup(self, symbol: SymbolNode) -> SymbolTarget:NEWLINE return self.symtables[-1][symbol]NEWLINENEWLINE def add_local(self, symbol: SymbolNode, typ: RType, is_arg: bool = False) -> 'Register':NEWLINE """Add register that represents a symbol to the symbol table.NEWLINENEWLINE Args:NEWLINE is_arg: is 
this a function argumentNEWLINE """NEWLINE assert isinstance(symbol, SymbolNode)NEWLINE reg = Register(typ, symbol.name, is_arg=is_arg, line=symbol.line)NEWLINE self.symtables[-1][symbol] = AssignmentTargetRegister(reg)NEWLINE if is_arg:NEWLINE self.builder.args.append(reg)NEWLINE return regNEWLINENEWLINE def add_local_reg(self,NEWLINE symbol: SymbolNode,NEWLINE typ: RType,NEWLINE is_arg: bool = False) -> AssignmentTargetRegister:NEWLINE """Like add_local, but return an assignment target instead of value."""NEWLINE self.add_local(symbol, typ, is_arg)NEWLINE target = self.symtables[-1][symbol]NEWLINE assert isinstance(target, AssignmentTargetRegister)NEWLINE return targetNEWLINENEWLINE def add_self_to_env(self, cls: ClassIR) -> AssignmentTargetRegister:NEWLINE """Low-level function that adds a 'self' argument.NEWLINENEWLINE This is only useful if using enter() instead of enter_method().NEWLINE """NEWLINE return self.add_local_reg(Var(SELF_NAME), RInstance(cls), is_arg=True)NEWLINENEWLINE def add_target(self, symbol: SymbolNode, target: SymbolTarget) -> SymbolTarget:NEWLINE self.symtables[-1][symbol] = targetNEWLINE return targetNEWLINENEWLINE def type_to_rtype(self, typ: Optional[Type]) -> RType:NEWLINE return self.mapper.type_to_rtype(typ)NEWLINENEWLINE def node_type(self, node: Expression) -> RType:NEWLINE if isinstance(node, IntExpr):NEWLINE # TODO: Don't special case IntExprNEWLINE return int_rprimitiveNEWLINE if node not in self.types:NEWLINE return object_rprimitiveNEWLINE mypy_type = self.types[node]NEWLINE return self.type_to_rtype(mypy_type)NEWLINENEWLINE def add_var_to_env_class(self,NEWLINE var: SymbolNode,NEWLINE rtype: RType,NEWLINE base: Union[FuncInfo, ImplicitClass],NEWLINE reassign: bool = False) -> AssignmentTarget:NEWLINE # First, define the variable name as an attribute of the environment class, and thenNEWLINE # construct a target for that attribute.NEWLINE self.fn_info.env_class.attributes[var.name] = rtypeNEWLINE attr_target = 
AssignmentTargetAttr(base.curr_env_reg, var.name)NEWLINENEWLINE if reassign:NEWLINE # Read the local definition of the variable, and set the corresponding attribute ofNEWLINE # the environment class' variable to be that value.NEWLINE reg = self.read(self.lookup(var), self.fn_info.fitem.line)NEWLINE self.add(SetAttr(base.curr_env_reg, var.name, reg, self.fn_info.fitem.line))NEWLINENEWLINE # Override the local definition of the variable to instead point at the variable inNEWLINE # the environment class.NEWLINE return self.add_target(var, attr_target)NEWLINENEWLINE def is_builtin_ref_expr(self, expr: RefExpr) -> bool:NEWLINE assert expr.node, "RefExpr not resolved"NEWLINE return '.' in expr.node.fullname and expr.node.fullname.split('.')[0] == 'builtins'NEWLINENEWLINE def load_global(self, expr: NameExpr) -> Value:NEWLINE """Loads a Python-level global.NEWLINENEWLINE This takes a NameExpr and uses its name as a key to retrieve the corresponding PyObject *NEWLINE from the _globals dictionary in the C-generated code.NEWLINE """NEWLINE # If the global is from 'builtins', turn it into a module attr load insteadNEWLINE if self.is_builtin_ref_expr(expr):NEWLINE assert expr.node, "RefExpr not resolved"NEWLINE return self.load_module_attr_by_fullname(expr.node.fullname, expr.line)NEWLINE if (self.is_native_module_ref_expr(expr) and isinstance(expr.node, TypeInfo)NEWLINE and not self.is_synthetic_type(expr.node)):NEWLINE assert expr.fullname is not NoneNEWLINE return self.load_native_type_object(expr.fullname)NEWLINE return self.load_global_str(expr.name, expr.line)NEWLINENEWLINE def load_global_str(self, name: str, line: int) -> Value:NEWLINE _globals = self.load_globals_dict()NEWLINE reg = self.load_str(name)NEWLINE return self.call_c(dict_get_item_op, [_globals, reg], line)NEWLINENEWLINE def load_globals_dict(self) -> Value:NEWLINE return self.add(LoadStatic(dict_rprimitive, 'globals', self.module_name))NEWLINENEWLINE def load_module_attr_by_fullname(self, fullname: str, 
line: int) -> Value:NEWLINE module, _, name = fullname.rpartition('.')NEWLINE left = self.load_module(module)NEWLINE return self.py_get_attr(left, name, line)NEWLINENEWLINE # Lacks a good type because there wasn't a reasonable type in 3.5 :(NEWLINE def catch_errors(self, line: int) -> Any:NEWLINE return catch_errors(self.module_path, line)NEWLINENEWLINE def warning(self, msg: str, line: int) -> None:NEWLINE self.errors.warning(msg, self.module_path, line)NEWLINENEWLINE def error(self, msg: str, line: int) -> None:NEWLINE self.errors.error(msg, self.module_path, line)NEWLINENEWLINE def note(self, msg: str, line: int) -> None:NEWLINE self.errors.note(msg, self.module_path, line)NEWLINENEWLINENEWLINEdef gen_arg_defaults(builder: IRBuilder) -> None:NEWLINE """Generate blocks for arguments that have default values.NEWLINENEWLINE If the passed value is an error value, then assign the defaultNEWLINE value to the argument.NEWLINE """NEWLINE fitem = builder.fn_info.fitemNEWLINE for arg in fitem.arguments:NEWLINE if arg.initializer:NEWLINE target = builder.lookup(arg.variable)NEWLINENEWLINE def get_default() -> Value:NEWLINE assert arg.initializer is not NoneNEWLINENEWLINE # If it is constant, don't bother storing itNEWLINE if is_constant(arg.initializer):NEWLINE return builder.accept(arg.initializer)NEWLINENEWLINE # Because gen_arg_defaults runs before calculate_arg_defaults, weNEWLINE # add the static/attribute to final_names/the class here.NEWLINE elif not builder.fn_info.is_nested:NEWLINE name = fitem.fullname + '.' 
+ arg.variable.nameNEWLINE builder.final_names.append((name, target.type))NEWLINE return builder.add(LoadStatic(target.type, name, builder.module_name))NEWLINE else:NEWLINE name = arg.variable.nameNEWLINE builder.fn_info.callable_class.ir.attributes[name] = target.typeNEWLINE return builder.add(NEWLINE GetAttr(builder.fn_info.callable_class.self_reg, name, arg.line))NEWLINE assert isinstance(target, AssignmentTargetRegister)NEWLINE builder.assign_if_null(target.register, get_default, arg.initializer.line)NEWLINE
import re

# Sample security-classification marking strings exercised against _PATTERN
# by main() below.
_TESTS = [
    "//REL/RELIDO",
    "TS//SI-G/TK//RS/OC/NF",
    "TS//SI-ABC-DEF//OC/NF",
    "TS//SI-G ABCD EFGH-XYZ//OC/NF",
    "TS//ANB/SI/TK/XNB//NF",
    "TS//SAR-BP-123/CA-XYZ YYY//NF",
    "TS//RD-CNWDI//NF",
    "S//FRD-SIGMA 14 18//REL",
    "//CTS//BOHEMIA",
    "//DEU S//NF",
    "//NS//ATOMAL//OC",
    "//JOINT S//REL",
    "TS//FGI DEU GBR//REL TO USA, DEU, GBR",
    "//FGI S//NF",
    "S//NF",
    "S//NF/PR",
    "U//SSI",
]

# Fix: made this a raw string.  The original was a plain literal containing
# "\/", "\w", "\s" etc., which are invalid string escape sequences — a
# DeprecationWarning since Python 3.6 and a SyntaxWarning from 3.12.  The
# resulting pattern bytes are identical, so matching behavior is unchanged.
_PATTERN = r"^(U?|C|(S|TS)?(\/\/(((\w|\-)+)(\s(\w|\-)+)*)((\/(\w|\-)+)(\s(\w|\-)+)*)*)?)\/\/((((\w|\-)+)|(REL( TO ((\w|\-)+)(,\s?((\w|\-)+))*)?))((\/((\w|\-)+)|(REL( TO ((\w|\-)+)(,(\w|\-)+)*)?))*))$"


def main():
    """Match every sample marking against _PATTERN and print each result.

    Prints ``<sample> <match-object-or-None>`` per line; returns None.
    """
    prog = re.compile(_PATTERN)
    for s in _TESTS:
        result = prog.match(s)
        print(s + " " + str(result))


if __name__ == '__main__':
    main()
# Tests for catkit.multiprocessing: child-exception propagation, server PID
# identity, locks, and barriers shared through a SharedMemoryManager.
import os
from threading import BrokenBarrierError
import time

from multiprocess.context import TimeoutError
from multiprocess.managers import State
import numpy as np
import pytest

from catkit.multiprocessing import Process, SharedMemoryManager

TIMEOUT = 10  # Use a shorter timeout for testing.


def test_child_exception():
    """An exception raised in a child process is re-raised by join()."""
    def client_func():
        raise RuntimeError("123456789")

    with SharedMemoryManager() as manager:
        client = Process(target=client_func)
        client.start()
        with pytest.raises(RuntimeError, match="123456789"):
            client.join()


def test_pid():
    """The manager's server runs in its own process, shared by all clients."""
    def client_func(pid):
        manager = SharedMemoryManager()
        manager.connect()
        server_pid = manager.getpid()

        client_pid = os.getpid()
        assert server_pid != client_pid, f"Server ({server_pid}) shouldn't be running on client ({client_pid})"
        assert server_pid == pid, f"Server ({server_pid}) connected to from client ({client_pid}) should be same as that started by manager ({pid})"

    with SharedMemoryManager() as manager:
        parent_pid = os.getpid()
        server_pid = manager.getpid()

        assert server_pid != parent_pid

        n_clients = 2
        clients = [Process(target=client_func, args=(server_pid,)) for x in range(n_clients)]

        for client in clients:
            client.start()

        for client in clients:
            client.join()


def test_no_persistent_server():
    """connect() fails when no manager server has been started."""
    manager = SharedMemoryManager()
    with pytest.raises(ConnectionRefusedError):
        manager.connect()


def test_locks():
    """A named lock held by the parent blocks (times out) in a child."""
    def client_func2():
        manager = SharedMemoryManager()
        manager.connect()
        with pytest.raises(TimeoutError):
            with manager.get_lock("test_lock"):  # This will timeout as the parent process has already acquired this.
                pass

    client = Process(target=client_func2)

    with SharedMemoryManager() as manager:
        assert manager._state.value == State.STARTED  # Oddly manager._state is State.Started doesn't work.
        with manager.get_lock("test_lock", timeout=TIMEOUT) as is_locked:
            assert is_locked
            # Child runs (and must time out) while the parent still holds the lock.
            client.start()
            client.join()


def client_barrier(sleep, parties, a_list, name_mangle=False):
    """Helper run in child processes: sleep, wait on a shared barrier, record elapsed time."""
    manager = SharedMemoryManager()
    manager.connect()
    name = f"test_barrier_{sleep}" if name_mangle else "test_barrier"
    barrier = manager.get_barrier(name, parties)
    t0 = time.time()
    time.sleep(sleep)
    barrier.wait(timeout=TIMEOUT)  # NOTE: The barrier release order is not guaranteed.
    a_list.append(np.rint(time.time() - t0))


def test_single_barrier():
    """Three parties on one barrier all wait for the slowest (6s) client."""
    with SharedMemoryManager() as manager:
        a_list = manager.list()

        clients = [Process(target=client_barrier, args=(6, 3, a_list)),
                   Process(target=client_barrier, args=(0, 3, a_list)),
                   Process(target=client_barrier, args=(0, 3, a_list))]

        for client in clients:
            client.start()

        for client in clients:
            client.join()

        # We Expect to see that the timer wrapping the sleep and the barrier for each client to be that of the longest.
        assert a_list._getvalue() == [6, 6, 6], a_list._getvalue()


def test_multiple_barriers():
    """Name-mangled single-party barriers don't block each other."""
    with SharedMemoryManager() as manager:
        a_list = manager.list()

        clients = [Process(target=client_barrier, args=(6, 1, a_list, True)),
                   Process(target=client_barrier, args=(0, 1, a_list, True)),
                   Process(target=client_barrier, args=(0, 1, a_list, True))]

        for client in clients:
            client.start()

        for client in clients:
            client.join()

        # We Expect to see that the timer wrapping the sleep and the barrier for each client to be that of their sleep.
        assert a_list._getvalue() == [0, 0, 6], a_list._getvalue()


def test_broken_barrier():
    """A barrier with more parties than waiters times out -> BrokenBarrierError."""
    with SharedMemoryManager() as manager:
        a_list = manager.list()

        # More parties than process will cause barrier.wait() to timeout.
        client = Process(target=client_barrier, args=(6, 3, a_list))
        client.start()
        with pytest.raises(BrokenBarrierError):
            client.join()
# Tests for VerificationService attestations (phone via Authy, email via
# SendGrid, Facebook/Twitter OAuth, Airbnb profile scraping).  External HTTP
# calls are faked with `responses`; side effects are checked in the DB.
import datetime
import mock
import pytest
# NOTE(review): HTTPError canonically lives in urllib.error; urllib.request
# re-exports it, so this import works as written.
from urllib.request import HTTPError

from marshmallow.exceptions import ValidationError
import responses
from werkzeug.security import generate_password_hash, check_password_hash

from database.models import AttestationTypes
from database.models import Attestation
from logic.attestation_service import (
    VerificationService,
    VerificationServiceResponse
)
from logic.attestation_service import CLAIM_TYPES
from logic.attestation_service import twitter_access_token_url
from logic.attestation_service import twitter_request_token_url
from logic.service_utils import (
    AirbnbVerificationError,
    EmailVerificationError,
    FacebookVerificationError,
    PhoneVerificationError,
    TwitterVerificationError,
)
from tests.helpers.eth_utils import sample_eth_address, str_eth


# Expected length of a hex-encoded signature string ("0x" + 130 hex chars).
SIGNATURE_LENGTH = 132


@responses.activate
def test_send_phone_verification_success():
    """A 200 from Authy yields a VerificationServiceResponse."""
    responses.add(
        responses.POST,
        'https://api.authy.com/protected/json/phones/verification/start',
        status=200
    )

    args = {
        'country_calling_code': '1',
        'phone': '12341234',
        'method': 'sms',
        'locale': None
    }
    response = VerificationService.send_phone_verification(**args)
    assert isinstance(response, VerificationServiceResponse)


@responses.activate
def test_send_phone_verification_invalid_number():
    """Authy error 60033 maps to a 'phone' field ValidationError."""
    responses.add(
        responses.POST,
        'https://api.authy.com/protected/json/phones/verification/start',
        json={'error_code': '60033'},
        status=400
    )

    args = {
        'country_calling_code': '1',
        'phone': '1234',
        'method': 'sms',
        'locale': None
    }
    with pytest.raises(ValidationError) as validation_err:
        VerificationService.send_phone_verification(**args)

    assert(validation_err.value.messages[0]) == 'Phone number is invalid.'
    assert(validation_err.value.field_names[0]) == 'phone'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


@responses.activate
def test_send_phone_verification_cant_sms_landline():
    """Authy error 60082 maps to a landline-specific ValidationError."""
    responses.add(
        responses.POST,
        'https://api.authy.com/protected/json/phones/verification/start',
        json={'error_code': '60082'},
        status=403
    )

    args = {
        'country_calling_code': '1',
        'phone': '1234',
        'method': 'sms',
        'locale': None
    }
    with pytest.raises(ValidationError) as validation_err:
        VerificationService.send_phone_verification(**args)

    assert(validation_err.value.messages[0]) == 'Cannot send SMS to landline.'
    assert(validation_err.value.field_names[0]) == 'phone'


@responses.activate
def test_send_phone_verification_twilio_error():
    """Other provider failures surface as PhoneVerificationError."""
    responses.add(
        responses.POST,
        'https://api.authy.com/protected/json/phones/verification/start',
        json={'error_code': '60060'},  # Account is suspended
        status=503
    )

    args = {
        'country_calling_code': '1',
        'phone': '1234',
        'method': 'sms',
        'locale': None
    }
    with pytest.raises(PhoneVerificationError) as service_err:
        VerificationService.send_phone_verification(**args)

    assert(str(service_err.value)) == \
        'Could not send verification code. Please try again shortly.'


@responses.activate
def test_verify_phone_valid_code(app):
    """A correct code produces a signed phone attestation stored in the DB."""
    responses.add(
        responses.GET,
        'https://api.authy.com/protected/json/phones/verification/check',
        json={
            'message': 'Verification code is correct.',
            'success': True
        }
    )

    args = {
        'eth_address': str_eth(sample_eth_address),
        'country_calling_code': '1',
        'phone': '12341234',
        'code': '123456'
    }
    with app.test_request_context():
        response = VerificationService.verify_phone(**args)
    assert isinstance(response, VerificationServiceResponse)

    assert len(response.data['signature']) == SIGNATURE_LENGTH
    assert response.data['claim_type'] == CLAIM_TYPES['phone']
    assert response.data['data'] == 'phone verified'

    attestations = Attestation.query.all()
    assert(len(attestations)) == 1
    assert(attestations[0].method) == AttestationTypes.PHONE
    assert(attestations[0].value) == "1 12341234"


@responses.activate
def test_verify_phone_expired_code():
    """Authy error 60023 maps to an expired-code ValidationError."""
    responses.add(
        responses.GET,
        'https://api.authy.com/protected/json/phones/verification/check',
        json={'error_code': '60023'},  # No pending verification
        status=404
    )

    args = {
        'eth_address': str_eth(sample_eth_address),
        'country_calling_code': '1',
        'phone': '12341234',
        'code': '123456'
    }
    with pytest.raises(ValidationError) as validation_err:
        VerificationService.verify_phone(**args)

    assert(validation_err.value.messages[0]) == 'Verification code has expired.'
    assert(validation_err.value.field_names[0]) == 'code'


@responses.activate
def test_verify_phone_invalid_code():
    """Authy error 60022 maps to an incorrect-code ValidationError."""
    responses.add(
        responses.GET,
        'https://api.authy.com/protected/json/phones/verification/check',
        json={'error_code': '60022'},  # No pending verification
        status=401
    )

    args = {
        'eth_address': str_eth(sample_eth_address),
        'country_calling_code': '1',
        'phone': '12341234',
        'code': 'garbage'
    }
    with pytest.raises(ValidationError) as validation_err:
        VerificationService.verify_phone(**args)

    assert(validation_err.value.messages[0]) == 'Verification code is incorrect.'
    assert(validation_err.value.field_names[0]) == 'code'


@mock.patch('logic.attestation_service._send_email_using_sendgrid')
@mock.patch('logic.attestation_service.datetime')
def test_send_email_verification(
        mock_datetime,
        mock_send_email_using_sendgrid):
    """Sending email verification stores a hashed email, code and expiry in the session."""
    mock_send_email_using_sendgrid.return_value = True

    now = datetime.datetime.utcnow()
    expire_in = datetime.timedelta(minutes=30)
    mock_datetime.datetime.utcnow.return_value = now
    mock_datetime.timedelta.return_value = expire_in

    email = 'origin@protocol.foo'
    with mock.patch('logic.attestation_service.session', dict()) as session:
        response = VerificationService.send_email_verification(email)
        assert isinstance(response, VerificationServiceResponse)
        assert 'email_attestation' in session
        assert len(session['email_attestation']['code']) == 6
        assert session['email_attestation']['expiry'] == now + expire_in
        assert check_password_hash(
            session['email_attestation']['email'], email
        )


@mock.patch('logic.attestation_service._send_email_using_sendgrid')
def test_send_email_verification_sendgrid_error(
        mock_send_email_using_sendgrid):
    """SendGrid failures surface as EmailVerificationError."""
    mock_send_email_using_sendgrid.side_effect = AttributeError

    with mock.patch('logic.attestation_service.session', dict()):
        with pytest.raises(EmailVerificationError) as service_err:
            VerificationService.send_email_verification('origin@protocol.foo')

    assert(str(service_err.value)) == \
        'Could not send verification code. Please try again shortly.'


@mock.patch('logic.attestation_service.session')
def test_verify_email_valid_code(mock_session, app):
    """A matching, unexpired code produces a signed email attestation."""
    session_dict = {
        'email_attestation': {
            'email': generate_password_hash('origin@protocol.foo'),
            'code': '12345',
            'expiry': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)
        }
    }

    args = {
        'eth_address': str_eth(sample_eth_address),
        'email': 'origin@protocol.foo',
        'code': '12345'
    }

    with mock.patch('logic.attestation_service.session', session_dict):
        with app.test_request_context():
            response = VerificationService.verify_email(**args)

    assert isinstance(response, VerificationServiceResponse)

    assert len(response.data['signature']) == SIGNATURE_LENGTH
    assert response.data['claim_type'] == CLAIM_TYPES['email']
    assert response.data['data'] == 'email verified'

    # Verify attestation stored in database
    attestations = Attestation.query.all()
    assert(len(attestations)) == 1
    assert(attestations[0].method) == AttestationTypes.EMAIL
    assert(attestations[0].value) == "origin@protocol.foo"


def test_verify_email_expired_code():
    """An expired code is rejected with a ValidationError."""
    # Mock a session object with an expiry time in the past
    session_dict = {
        'email_attestation': {
            'email': generate_password_hash('origin@protocol.foo'),
            'code': '12345',
            'expiry': datetime.datetime.utcnow() - datetime.timedelta(minutes=30)
        }
    }

    args = {
        'email': 'origin@protocol.foo',
        'code': '12345',
        'eth_address': str_eth(sample_eth_address)
    }

    with mock.patch('logic.attestation_service.session', session_dict):
        with pytest.raises(ValidationError) as validation_err:
            VerificationService.verify_email(**args)

    assert(validation_err.value.messages[0]) == 'Verification code has expired.'
    assert(validation_err.value.field_names[0]) == 'code'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


@mock.patch('logic.attestation_service.session')
def test_verify_email_invalid_code(mock_session):
    """A wrong code is rejected with a ValidationError."""
    session_dict = {
        'email_attestation': {
            'email': generate_password_hash('origin@protocol.foo'),
            'code': '12345',
            'expiry': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)
        }
    }

    args = {
        'eth_address': str_eth(sample_eth_address),
        'email': 'origin@protocol.foo',
        'code': '54321'
    }

    with mock.patch('logic.attestation_service.session', session_dict):
        with pytest.raises(ValidationError) as validation_err:
            VerificationService.verify_email(**args)

    assert(validation_err.value.messages[0]) == 'Verification code is incorrect.'
    assert(validation_err.value.field_names[0]) == 'code'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


def test_verify_email_no_verification_sent():
    """Verification without a prior send fails with EmailVerificationError."""
    args = {
        'eth_address': str_eth(sample_eth_address),
        'email': 'origin@protocol.foo',
        'code': '54321'
    }

    with mock.patch('logic.attestation_service.session', dict()):
        with pytest.raises(EmailVerificationError) as verification_err:
            VerificationService.verify_email(**args)

    assert(verification_err.value.message) == \
        'No verification code was found.'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


def test_verify_email_invalid_email():
    """A code sent for a different email address is rejected."""
    session_dict = {
        'email_attestation': {
            'email': generate_password_hash('not_origin@protocol.foo'),
            'code': '12345',
            'expiry': datetime.datetime.utcnow() + datetime.timedelta(minutes=30)
        }
    }

    args = {
        'eth_address': str_eth(sample_eth_address),
        'email': 'origin@protocol.foo',
        'code': '54321'
    }

    with mock.patch('logic.attestation_service.session', session_dict):
        with pytest.raises(EmailVerificationError) as verification_err:
            VerificationService.verify_email(**args)

    assert(verification_err.value.message) == \
        'No verification code was found for that email.'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


def test_facebook_auth_url():
    """The Facebook OAuth dialog URL embeds client id and redirect URI."""
    resp = VerificationService.facebook_auth_url()
    resp_data = resp.data
    assert resp_data['url'] == (
        'https://www.facebook.com/v2.12/dialog/oauth?client_id'
        '=facebook-client-id&redirect_uri'
        '=https://testhost.com/redirects/facebook/'
    )


@responses.activate
def test_verify_facebook_valid_code(app):
    """A valid OAuth code produces a signed facebook attestation."""
    auth_url = 'https://graph.facebook.com/v2.12/oauth/access_token' + \
        '?client_id=facebook-client-id' + \
        '&client_secret=facebook-client-secret' + \
        '&redirect_uri=https%3A%2F%2Ftesthost.com%2Fredirects%2Ffacebook%2F' + \
        '&code=abcde12345'
    verify_url = 'https://graph.facebook.com/me?access_token=12345'

    responses.add(
        responses.GET,
        auth_url,
        json={'access_token': 12345},
        status=200
    )

    responses.add(
        responses.GET,
        verify_url,
        json={'name': 'Origin Protocol'},
        status=200
    )

    args = {
        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
        'code': 'abcde12345'
    }

    with app.test_request_context():
        verification_response = VerificationService.verify_facebook(**args)
    assert isinstance(verification_response, VerificationServiceResponse)
    assert len(verification_response.data['signature']) == SIGNATURE_LENGTH
    assert verification_response.data['claim_type'] == CLAIM_TYPES['facebook']
    assert verification_response.data['data'] == 'facebook verified'

    # Verify attestation stored in database
    attestations = Attestation.query.all()
    assert(len(attestations)) == 1
    assert(attestations[0].method) == AttestationTypes.FACEBOOK
    assert(attestations[0].value) == 'Origin Protocol'


@responses.activate
def test_verify_facebook_invalid_code():
    """An invalid OAuth code fails with FacebookVerificationError."""
    auth_url = 'https://graph.facebook.com/v2.12/oauth/access_token' + \
        '?client_id=facebook-client-id' + \
        '&client_secret=facebook-client-secret' + \
        '&redirect_uri=https%3A%2F%2Ftesthost.com%2Fredirects%2Ffacebook%2F' + \
        '&code=bananas'

    responses.add(
        responses.GET,
        auth_url,
        json={'error': 'invalid'},
        status=403
    )

    args = {
        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
        'code': 'bananas'
    }

    with pytest.raises(FacebookVerificationError) as service_err:
        VerificationService.verify_facebook(**args)

    assert str(service_err.value) == 'The code you provided is invalid.'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


@responses.activate
def test_twitter_auth_url(app):
    """The Twitter authenticate URL embeds the request token."""
    response_content = b'oauth_token=peaches&oauth_token_secret=pears'

    responses.add(
        responses.POST,
        twitter_request_token_url,
        body=response_content,
        status=200
    )

    with app.test_request_context():
        verification_response = VerificationService.twitter_auth_url()
    assert isinstance(verification_response, VerificationServiceResponse)
    assert verification_response.data['url'] == (
        'https://api.twitter.com/oauth/authenticate?oauth_token=peaches'
    )


@mock.patch('logic.attestation_service.session')
@responses.activate
def test_verify_twitter_valid_code(mock_session, app):
    """A valid OAuth verifier produces a signed twitter attestation."""
    responses.add(
        responses.POST,
        twitter_access_token_url,
        body=b'screen_name=originprotocol',
        status=200
    )

    args = {
        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
        'oauth_verifier': 'blueberries'
    }

    session_dict = {
        'request_token': {
            'oauth_token': '1234',
            'oauth_token_secret': '5678'
        }
    }

    with mock.patch('logic.attestation_service.session', session_dict):
        with app.test_request_context():
            verification_response = VerificationService.verify_twitter(**args)

    assert isinstance(verification_response, VerificationServiceResponse)

    assert len(verification_response.data['signature']) == SIGNATURE_LENGTH
    assert verification_response.data['claim_type'] == CLAIM_TYPES['twitter']
    assert verification_response.data['data'] == 'twitter verified'

    # Verify attestation stored in database
    attestations = Attestation.query.all()
    assert(len(attestations)) == 1
    assert(attestations[0].method) == AttestationTypes.TWITTER
    assert(attestations[0].value) == 'originprotocol'


@mock.patch('logic.attestation_service.session')
@responses.activate
def test_verify_twitter_invalid_verifier(mock_session, app):
    """An invalid OAuth verifier fails with TwitterVerificationError."""
    responses.add(
        responses.POST,
        twitter_access_token_url,
        status=401
    )

    args = {
        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
        'oauth_verifier': 'pineapples'
    }

    session_dict = {
        'request_token': {
            'oauth_token': '1234',
            'oauth_token_secret': '5678'
        }
    }

    with mock.patch('logic.attestation_service.session', session_dict):
        with pytest.raises(TwitterVerificationError) as service_err:
            with app.test_request_context():
                VerificationService.verify_twitter(**args)

    assert str(service_err.value) == 'The verifier you provided is invalid.'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


@mock.patch('logic.attestation_service.requests')
@mock.patch('logic.attestation_service.session')
def test_verify_twitter_invalid_session(mock_session, mock_requests):
    """Verification without a stored request token fails."""
    args = {
        'eth_address': '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
        'oauth_verifier': 'pineapples'
    }

    with pytest.raises(TwitterVerificationError) as service_err:
        VerificationService.verify_twitter(**args)

    assert str(service_err.value) == 'Session not found.'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


def test_generate_airbnb_verification_code():
    """The Airbnb code phrase is deterministic for a given address/user id."""
    resp = VerificationService.generate_airbnb_verification_code(
        '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
        '123456'
    )
    assert isinstance(resp, VerificationServiceResponse)

    assert resp.data['code'] == "art brick aspect accident brass betray antenna"


def test_generate_airbnb_verification_code_incorrect_user_id_format():
    """Non-numeric Airbnb user ids are rejected."""
    with pytest.raises(ValidationError) as validation_error:
        VerificationService.generate_airbnb_verification_code(
            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
            '12a34'
        )

    assert str(validation_error.value) == 'AirbnbUserId should be a number.'


@mock.patch('logic.attestation_service.urlopen')
def test_verify_airbnb(mock_urllib_request, app):
    """The code phrase found in the profile yields a signed airbnb attestation."""
    mock_urllib_request.return_value.read.return_value = """
        <html><div>
            Airbnb profile description
            Origin verification code: art brick aspect accident brass betray antenna
            some more profile description
        </div></html>""".encode('utf-8')
    airbnbUserId = "123456"

    with app.test_request_context():
        verification_response = VerificationService.verify_airbnb(
            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
            airbnbUserId
        )
    assert isinstance(verification_response, VerificationServiceResponse)

    assert len(verification_response.data['signature']) == SIGNATURE_LENGTH
    assert verification_response.data['claim_type'] == CLAIM_TYPES['airbnb']
    assert verification_response.data['data'] == 'airbnbUserId:' + airbnbUserId

    # Verify attestation stored in database
    attestations = Attestation.query.all()
    assert(len(attestations)) == 1
    assert(attestations[0].method) == AttestationTypes.AIRBNB
    assert(attestations[0].value) == "123456"


@mock.patch('logic.attestation_service.urlopen')
def test_verify_airbnb_verification_code_missing(mock_urllib_request):
    """A profile without the code phrase fails verification."""
    mock_urllib_request.return_value.read.return_value = """
        <html><div>
            Airbnb profile description some more profile description
        </div></html>""".encode('utf-8')

    with pytest.raises(AirbnbVerificationError) as service_err:
        VerificationService.verify_airbnb(
            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
            "123456"
        )

    assert str(service_err.value) == "Origin verification code: art brick aspect " \
        + "accident brass betray antenna has not been found in user's Airbnb profile."

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


@mock.patch('logic.attestation_service.urlopen')
def test_verify_airbnb_verification_code_incorrect(mock_urllib_request):
    """A profile containing a wrong code phrase fails verification."""
    mock_urllib_request.return_value.read.return_value = """
        <html><div>
            Airbnb profile description
            Origin verification code: art brick aspect pimpmobile
            some more profile description
        </div></html>""".encode('utf-8')

    with pytest.raises(AirbnbVerificationError) as service_err:
        VerificationService.verify_airbnb(
            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
            "123456"
        )

    assert str(service_err.value) == "Origin verification code: art brick aspect " \
        + "accident brass betray antenna has not been found in user's Airbnb profile."

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


@mock.patch('logic.attestation_service.urlopen')
def test_verify_airbnb_verification_code_incorrect_user_id_format(
        mock_urllib_request):
    """Non-numeric user ids are rejected before the profile is inspected."""
    mock_urllib_request.return_value.read.return_value = """
        <html><div>
            Airbnb profile description
            Origin verification code: art brick aspect accident brass betray antenna
            some more profile description
        </div></html>""".encode('utf-8')

    with pytest.raises(ValidationError) as validation_error:
        VerificationService.verify_airbnb(
            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
            "12a34"
        )

    assert str(validation_error.value) == 'AirbnbUserId should be a number.'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


@mock.patch('logic.attestation_service.urlopen', side_effect=HTTPError(
    'https://www.airbnb.com/users/show/99999999999999999',
    404,
    "User not found",
    {},
    {}
))
def test_verify_airbnb_verification_code_non_existing_user(
        mock_urllib_request):
    """A 404 from Airbnb maps to a user-not-found error."""
    with pytest.raises(AirbnbVerificationError) as service_err:
        VerificationService.verify_airbnb(
            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
            "99999999999999999"
        )

    assert str(
        service_err.value) == 'Airbnb user id: 99999999999999999 not found.'

    # Verify attestation not stored
    attestations = Attestation.query.all()
    assert(len(attestations)) == 0


@mock.patch('logic.attestation_service.urlopen', side_effect=HTTPError(
    'https://www.airbnb.com/users/show/123',
    500,
    "Internal server error",
    {},
    {}
))
def test_verify_airbnb_verification_code_internal_server_error(
        mock_urllib_request):
    """Any other HTTP failure maps to a generic fetch error."""
    with pytest.raises(AirbnbVerificationError) as service_err:
        VerificationService.verify_airbnb(
            '0x112234455C3a32FD11230C42E7Bccd4A84e02010',
            "123"
        )

    assert str(service_err.value) == "Can not fetch user's Airbnb profile."
# encoding: utf-8
"""Event loop integration for the ZeroMQ-based kernels."""

# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.

from functools import partial
import os
import sys
import platform

import zmq

from distutils.version import LooseVersion as V
from traitlets.config.application import Application


def _use_appnope():
    """Should we use appnope for dealing with OS X app nap?

    Checks if we are on OS X 10.9 or greater.
    """
    return sys.platform == 'darwin' and V(platform.mac_ver()[0]) >= V('10.9')


def _notify_stream_qt(kernel, stream):
    # Wire a zmq stream's file descriptor into the Qt event loop so that
    # socket activity wakes the kernel out of Qt's exec loop.

    from IPython.external.qt_for_kernel import QtCore

    def process_stream_events():
        """fall back to main loop when there's a socket event"""
        # call flush to ensure that the stream doesn't lose events
        # due to our consuming of the edge-triggered FD
        # flush returns the number of events consumed.
        # if there were any, wake it up
        if stream.flush(limit=1):
            notifier.setEnabled(False)
            kernel.app.quit()

    fd = stream.getsockopt(zmq.FD)
    notifier = QtCore.QSocketNotifier(fd, QtCore.QSocketNotifier.Read, kernel.app)
    notifier.activated.connect(process_stream_events)
    # there may already be unprocessed events waiting.
    # these events will not wake zmq's edge-triggered FD
    # since edge-triggered notification only occurs on new i/o activity.
    # process all the waiting events immediately
    # so we start in a clean state ensuring that any new i/o events will notify.
    # schedule first call on the eventloop as soon as it's running,
    # so we don't block here processing events
    timer = QtCore.QTimer(kernel.app)
    timer.setSingleShot(True)
    timer.timeout.connect(process_stream_events)
    timer.start(0)

# mapping of keys to loop functions
loop_map = {
    'inline': None,
    'nbagg': None,
    'notebook': None,
    'ipympl': None,
    'widget': None,
    None: None,
}

def register_integration(*toolkitnames):
    """Decorator to register an event loop to integrate with the IPython kernel

    The decorator takes names to register the event loop as for the %gui magic.
    You can provide alternative names for the same toolkit.

    The decorated function should take a single argument, the IPython kernel
    instance, arrange for the event loop to call ``kernel.do_one_iteration()``
    at least every ``kernel._poll_interval`` seconds, and start the event loop.

    :mod:`ipykernel.eventloops` provides and registers such functions
    for a few common event loops.
    """
    def decorator(func):
        for name in toolkitnames:
            loop_map[name] = func

        # Default exit hook is a no-op; toolkits override it via @func.exit.
        func.exit_hook = lambda kernel: None

        def exit_decorator(exit_func):
            """@func.exit is now a decorator

            to register a function to be called on exit
            """
            func.exit_hook = exit_func
            return exit_func

        func.exit = exit_decorator
        return func

    return decorator


def _loop_qt(app):
    """Inner-loop for running the Qt eventloop

    Pulled from guisupport.start_event_loop in IPython < 5.2,
    since IPython 5.2 only checks `get_ipython().active_eventloop` is defined,
    rather than if the eventloop is actually running.
    """
    app._in_event_loop = True
    app.exec_()
    app._in_event_loop = False


@register_integration('qt4')
def loop_qt4(kernel):
    """Start a kernel with PyQt4 event loop integration."""

    from IPython.lib.guisupport import get_app_qt4

    kernel.app = get_app_qt4([" "])
    kernel.app.setQuitOnLastWindowClosed(False)

    # Only register the eventloop for the shell stream because doing
    # it for the control stream is generating a bunch of unnecessary
    # warnings on Windows.
    _notify_stream_qt(kernel, kernel.shell_streams[0])

    _loop_qt(kernel.app)


@register_integration('qt', 'qt5')
def loop_qt5(kernel):
    """Start a kernel with PyQt5 event loop integration."""
    os.environ['QT_API'] = 'pyqt5'
    return loop_qt4(kernel)


# exit and watch are the same for qt 4 and 5
@loop_qt4.exit
@loop_qt5.exit
def loop_qt_exit(kernel):
    # Quit the Qt application when the kernel shuts down.
    kernel.app.exit()


def _loop_wx(app):
    """Inner-loop for running the Wx eventloop

    Pulled from guisupport.start_event_loop in IPython < 5.2,
    since IPython 5.2 only checks `get_ipython().active_eventloop` is defined,
    rather than if the eventloop is actually running.
    """
    app._in_event_loop = True
    app.MainLoop()
    app._in_event_loop = False


@register_integration('wx')
def loop_wx(kernel):
    """Start a kernel with wx event loop support."""

    import wx

    # Wx uses milliseconds
    poll_interval = int(1000 * kernel._poll_interval)

    def wake():
        """wake from wx"""
        for stream in kernel.shell_streams:
            if stream.flush(limit=1):
                kernel.app.ExitMainLoop()
                return

    # We have to put the wx.Timer in a wx.Frame for it to fire properly.
    # We make the Frame hidden when we create it in the main app below.
    class TimerFrame(wx.Frame):
        def __init__(self, func):
            wx.Frame.__init__(self, None, -1)
            self.timer = wx.Timer(self)
            # Units for the timer are in milliseconds
            self.timer.Start(poll_interval)
            # NOTE(review): chunk is truncated here — the remainder of
            # TimerFrame.__init__ and loop_wx continues beyond this view.
self.Bind(wx.EVT_TIMER, self.on_timer)NEWLINE self.func = funcNEWLINENEWLINE def on_timer(self, event):NEWLINE self.func()NEWLINENEWLINE # We need a custom wx.App to create our Frame subclass that has theNEWLINE # wx.Timer to defer back to the tornado event loop.NEWLINE class IPWxApp(wx.App):NEWLINE def OnInit(self):NEWLINE self.frame = TimerFrame(wake)NEWLINE self.frame.Show(False)NEWLINE return TrueNEWLINENEWLINE # The redirect=False here makes sure that wx doesn't replaceNEWLINE # sys.stdout/stderr with its own classes.NEWLINE if not (NEWLINE getattr(kernel, 'app', None)NEWLINE and isinstance(kernel.app, wx.App)NEWLINE ):NEWLINE kernel.app = IPWxApp(redirect=False)NEWLINENEWLINE # The import of wx on Linux sets the handler for signal.SIGINTNEWLINE # to 0. This is a bug in wx or gtk. We fix by just setting itNEWLINE # back to the Python default.NEWLINE import signalNEWLINE if not callable(signal.getsignal(signal.SIGINT)):NEWLINE signal.signal(signal.SIGINT, signal.default_int_handler)NEWLINENEWLINE _loop_wx(kernel.app)NEWLINENEWLINENEWLINE@loop_wx.exitNEWLINEdef loop_wx_exit(kernel):NEWLINE import wxNEWLINE wx.Exit()NEWLINENEWLINENEWLINE@register_integration('tk')NEWLINEdef loop_tk(kernel):NEWLINE """Start a kernel with the Tk event loop."""NEWLINENEWLINE from tkinter import Tk, READABLENEWLINENEWLINE app = Tk()NEWLINE # Capability detection:NEWLINE # per https://docs.python.org/3/library/tkinter.html#file-handlersNEWLINE # file handlers are not available on WindowsNEWLINE if hasattr(app, 'createfilehandler'):NEWLINE # A basic wrapper for structural similarity with the Windows versionNEWLINE class BasicAppWrapper(object):NEWLINE def __init__(self, app):NEWLINE self.app = appNEWLINE self.app.withdraw()NEWLINENEWLINE def process_stream_events(stream, *a, **kw):NEWLINE """fall back to main loop when there's a socket event"""NEWLINE if stream.flush(limit=1):NEWLINE app.tk.deletefilehandler(stream.getsockopt(zmq.FD))NEWLINE app.quit()NEWLINENEWLINE # For Tkinter, we 
create a Tk object and call its withdraw method.NEWLINE kernel.app_wrapper = BasicAppWrapper(app)NEWLINENEWLINE for stream in kernel.shell_streams:NEWLINE notifier = partial(process_stream_events, stream)NEWLINE # seems to be needed for tkNEWLINE notifier.__name__ = "notifier"NEWLINE app.tk.createfilehandler(stream.getsockopt(zmq.FD), READABLE, notifier)NEWLINE # schedule initial call after startNEWLINE app.after(0, notifier)NEWLINENEWLINE app.mainloop()NEWLINENEWLINE else:NEWLINE doi = kernel.do_one_iterationNEWLINE # Tk uses millisecondsNEWLINE poll_interval = int(1000 * kernel._poll_interval)NEWLINENEWLINE class TimedAppWrapper(object):NEWLINE def __init__(self, app, func):NEWLINE self.app = appNEWLINE self.app.withdraw()NEWLINE self.func = funcNEWLINENEWLINE def on_timer(self):NEWLINE self.func()NEWLINE self.app.after(poll_interval, self.on_timer)NEWLINENEWLINE def start(self):NEWLINE self.on_timer() # Call it once to get things going.NEWLINE self.app.mainloop()NEWLINENEWLINE kernel.app_wrapper = TimedAppWrapper(app, doi)NEWLINE kernel.app_wrapper.start()NEWLINENEWLINENEWLINE@loop_tk.exitNEWLINEdef loop_tk_exit(kernel):NEWLINE kernel.app_wrapper.app.destroy()NEWLINENEWLINENEWLINE@register_integration('gtk')NEWLINEdef loop_gtk(kernel):NEWLINE """Start the kernel, coordinating with the GTK event loop"""NEWLINE from .gui.gtkembed import GTKEmbedNEWLINENEWLINE gtk_kernel = GTKEmbed(kernel)NEWLINE gtk_kernel.start()NEWLINE kernel._gtk = gtk_kernelNEWLINENEWLINENEWLINE@loop_gtk.exitNEWLINEdef loop_gtk_exit(kernel):NEWLINE kernel._gtk.stop()NEWLINENEWLINENEWLINE@register_integration('gtk3')NEWLINEdef loop_gtk3(kernel):NEWLINE """Start the kernel, coordinating with the GTK event loop"""NEWLINE from .gui.gtk3embed import GTKEmbedNEWLINENEWLINE gtk_kernel = GTKEmbed(kernel)NEWLINE gtk_kernel.start()NEWLINE kernel._gtk = gtk_kernelNEWLINENEWLINENEWLINE@loop_gtk3.exitNEWLINEdef loop_gtk3_exit(kernel):NEWLINE 
kernel._gtk.stop()NEWLINENEWLINENEWLINE@register_integration('osx')NEWLINEdef loop_cocoa(kernel):NEWLINE """Start the kernel, coordinating with the Cocoa CFRunLoop event loopNEWLINE via the matplotlib MacOSX backend.NEWLINE """NEWLINE from ._eventloop_macos import mainloop, stopNEWLINENEWLINE real_excepthook = sys.excepthookNEWLINE def handle_int(etype, value, tb):NEWLINE """don't let KeyboardInterrupts look like crashes"""NEWLINE # wake the eventloop when we get a signalNEWLINE stop()NEWLINE if etype is KeyboardInterrupt:NEWLINE print("KeyboardInterrupt caught in CFRunLoop", file=sys.__stdout__)NEWLINE else:NEWLINE real_excepthook(etype, value, tb)NEWLINENEWLINE while not kernel.shell.exit_now:NEWLINE try:NEWLINE # double nested try/except, to properly catch KeyboardInterruptNEWLINE # due to pyzmq Issue #130NEWLINE try:NEWLINE # don't let interrupts during mainloop invoke crash_handler:NEWLINE sys.excepthook = handle_intNEWLINE mainloop(kernel._poll_interval)NEWLINE for stream in kernel.shell_streams:NEWLINE if stream.flush(limit=1):NEWLINE # events to process, return control to kernelNEWLINE returnNEWLINE except:NEWLINE raiseNEWLINE except KeyboardInterrupt:NEWLINE # Ctrl-C shouldn't crash the kernelNEWLINE print("KeyboardInterrupt caught in kernel", file=sys.__stdout__)NEWLINE finally:NEWLINE # ensure excepthook is restoredNEWLINE sys.excepthook = real_excepthookNEWLINENEWLINENEWLINE@loop_cocoa.exitNEWLINEdef loop_cocoa_exit(kernel):NEWLINE from ._eventloop_macos import stopNEWLINE stop()NEWLINENEWLINENEWLINE@register_integration('asyncio')NEWLINEdef loop_asyncio(kernel):NEWLINE '''Start a kernel with asyncio event loop support.'''NEWLINE import asyncioNEWLINE loop = asyncio.get_event_loop()NEWLINE # loop is already running (e.g. 
tornado 5), nothing left to doNEWLINE if loop.is_running():NEWLINE returnNEWLINENEWLINE if loop.is_closed():NEWLINE # main loop is closed, create a new oneNEWLINE loop = asyncio.new_event_loop()NEWLINE asyncio.set_event_loop(loop)NEWLINE loop._should_close = FalseNEWLINENEWLINE # pause eventloop when there's an event on a zmq socketNEWLINE def process_stream_events(stream):NEWLINE """fall back to main loop when there's a socket event"""NEWLINE if stream.flush(limit=1):NEWLINE loop.stop()NEWLINENEWLINE for stream in kernel.shell_streams:NEWLINE fd = stream.getsockopt(zmq.FD)NEWLINE notifier = partial(process_stream_events, stream)NEWLINE loop.add_reader(fd, notifier)NEWLINE loop.call_soon(notifier)NEWLINENEWLINE while True:NEWLINE error = NoneNEWLINE try:NEWLINE loop.run_forever()NEWLINE except KeyboardInterrupt:NEWLINE continueNEWLINE except Exception as e:NEWLINE error = eNEWLINE if loop._should_close:NEWLINE loop.close()NEWLINE if error is not None:NEWLINE raise errorNEWLINE breakNEWLINENEWLINENEWLINE@loop_asyncio.exitNEWLINEdef loop_asyncio_exit(kernel):NEWLINE """Exit hook for asyncio"""NEWLINE import asyncioNEWLINE loop = asyncio.get_event_loop()NEWLINENEWLINE @asyncio.coroutineNEWLINE def close_loop():NEWLINE if hasattr(loop, 'shutdown_asyncgens'):NEWLINE yield from loop.shutdown_asyncgens()NEWLINE loop._should_close = TrueNEWLINE loop.stop()NEWLINENEWLINE if loop.is_running():NEWLINE close_loop()NEWLINENEWLINE elif not loop.is_closed():NEWLINE loop.run_until_complete(close_loop)NEWLINE loop.close()NEWLINENEWLINENEWLINEdef enable_gui(gui, kernel=None):NEWLINE """Enable integration with a given GUI"""NEWLINE if gui not in loop_map:NEWLINE e = "Invalid GUI request %r, valid ones are:%s" % (gui, loop_map.keys())NEWLINE raise ValueError(e)NEWLINE if kernel is None:NEWLINE if Application.initialized():NEWLINE kernel = getattr(Application.instance(), 'kernel', None)NEWLINE if kernel is None:NEWLINE raise RuntimeError("You didn't specify a kernel,"NEWLINE " and no 
IPython Application with a kernel appears to be running."NEWLINE )NEWLINE loop = loop_map[gui]NEWLINE if loop and kernel.eventloop is not None and kernel.eventloop is not loop:NEWLINE raise RuntimeError("Cannot activate multiple GUI eventloops")NEWLINE kernel.eventloop = loopNEWLINE
# mock.pyNEWLINE# Test tools for mocking and patching.NEWLINE# Maintained by Michael FoordNEWLINE# Backport for other versions of Python available fromNEWLINE# http://pypi.python.org/pypi/mockNEWLINENEWLINE__all__ = (NEWLINE 'Mock',NEWLINE 'MagicMock',NEWLINE 'patch',NEWLINE 'sentinel',NEWLINE 'DEFAULT',NEWLINE 'ANY',NEWLINE 'call',NEWLINE 'create_autospec',NEWLINE 'FILTER_DIR',NEWLINE 'NonCallableMock',NEWLINE 'NonCallableMagicMock',NEWLINE 'mock_open',NEWLINE 'PropertyMock',NEWLINE)NEWLINENEWLINENEWLINE__version__ = '1.0'NEWLINENEWLINENEWLINEimport inspectNEWLINEimport pprintNEWLINEimport sysNEWLINEimport builtinsNEWLINEfrom types import ModuleTypeNEWLINEfrom functools import wraps, partialNEWLINENEWLINENEWLINE_builtins = {name for name in dir(builtins) if not name.startswith('_')}NEWLINENEWLINEBaseExceptions = (BaseException,)NEWLINEif 'java' in sys.platform:NEWLINE # jythonNEWLINE import javaNEWLINE BaseExceptions = (BaseException, java.lang.Throwable)NEWLINENEWLINENEWLINEFILTER_DIR = TrueNEWLINENEWLINE# Workaround for issue #12370NEWLINE# Without this, the __class__ properties wouldn't be set correctlyNEWLINE_safe_super = superNEWLINENEWLINEdef _is_instance_mock(obj):NEWLINE # can't use isinstance on Mock objects because they override __class__NEWLINE # The base class for all mocks is NonCallableMockNEWLINE return issubclass(type(obj), NonCallableMock)NEWLINENEWLINENEWLINEdef _is_exception(obj):NEWLINE return (NEWLINE isinstance(obj, BaseExceptions) orNEWLINE isinstance(obj, type) and issubclass(obj, BaseExceptions)NEWLINE )NEWLINENEWLINENEWLINEclass _slotted(object):NEWLINE __slots__ = ['a']NEWLINENEWLINENEWLINEDescriptorTypes = (NEWLINE type(_slotted.a),NEWLINE property,NEWLINE)NEWLINENEWLINENEWLINEdef _get_signature_object(func, as_instance, eat_self):NEWLINE """NEWLINE Given an arbitrary, possibly callable object, try to create a suitableNEWLINE signature object.NEWLINE Return a (reduced func, signature) tuple, or None.NEWLINE """NEWLINE if 
isinstance(func, type) and not as_instance:NEWLINE # If it's a type and should be modelled as a type, use __init__.NEWLINE try:NEWLINE func = func.__init__NEWLINE except AttributeError:NEWLINE return NoneNEWLINE # Skip the `self` argument in __init__NEWLINE eat_self = TrueNEWLINE elif not isinstance(func, FunctionTypes):NEWLINE # If we really want to model an instance of the passed type,NEWLINE # __call__ should be looked up, not __init__.NEWLINE try:NEWLINE func = func.__call__NEWLINE except AttributeError:NEWLINE return NoneNEWLINE if eat_self:NEWLINE sig_func = partial(func, None)NEWLINE else:NEWLINE sig_func = funcNEWLINE try:NEWLINE return func, inspect.signature(sig_func)NEWLINE except ValueError:NEWLINE # Certain callable types are not supported by inspect.signature()NEWLINE return NoneNEWLINENEWLINENEWLINEdef _check_signature(func, mock, skipfirst, instance=False):NEWLINE sig = _get_signature_object(func, instance, skipfirst)NEWLINE if sig is None:NEWLINE returnNEWLINE func, sig = sigNEWLINE def checksig(_mock_self, *args, **kwargs):NEWLINE sig.bind(*args, **kwargs)NEWLINE _copy_func_details(func, checksig)NEWLINE type(mock)._mock_check_sig = checksigNEWLINENEWLINENEWLINEdef _copy_func_details(func, funcopy):NEWLINE funcopy.__name__ = func.__name__NEWLINE funcopy.__doc__ = func.__doc__NEWLINE try:NEWLINE funcopy.__text_signature__ = func.__text_signature__NEWLINE except AttributeError:NEWLINE passNEWLINE # we explicitly don't copy func.__dict__ into this copy as it wouldNEWLINE # expose original attributes that should be mockedNEWLINE try:NEWLINE funcopy.__module__ = func.__module__NEWLINE except AttributeError:NEWLINE passNEWLINE try:NEWLINE funcopy.__defaults__ = func.__defaults__NEWLINE except AttributeError:NEWLINE passNEWLINE try:NEWLINE funcopy.__kwdefaults__ = func.__kwdefaults__NEWLINE except AttributeError:NEWLINE passNEWLINENEWLINENEWLINEdef _callable(obj):NEWLINE if isinstance(obj, type):NEWLINE return TrueNEWLINE if getattr(obj, '__call__', 
None) is not None:NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINENEWLINEdef _is_list(obj):NEWLINE # checks for list or tuplesNEWLINE # XXXX badly named!NEWLINE return type(obj) in (list, tuple)NEWLINENEWLINENEWLINEdef _instance_callable(obj):NEWLINE """Given an object, return True if the object is callable.NEWLINE For classes, return True if instances would be callable."""NEWLINE if not isinstance(obj, type):NEWLINE # already an instanceNEWLINE return getattr(obj, '__call__', None) is not NoneNEWLINENEWLINE # *could* be broken by a class overriding __mro__ or __dict__ viaNEWLINE # a metaclassNEWLINE for base in (obj,) + obj.__mro__:NEWLINE if base.__dict__.get('__call__') is not None:NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINENEWLINEdef _set_signature(mock, original, instance=False):NEWLINE # creates a function with signature (*args, **kwargs) that delegates to aNEWLINE # mock. It still does signature checking by calling a lambda with the sameNEWLINE # signature as the original.NEWLINE if not _callable(original):NEWLINE returnNEWLINENEWLINE skipfirst = isinstance(original, type)NEWLINE result = _get_signature_object(original, instance, skipfirst)NEWLINE if result is None:NEWLINE returnNEWLINE func, sig = resultNEWLINE def checksig(*args, **kwargs):NEWLINE sig.bind(*args, **kwargs)NEWLINE _copy_func_details(func, checksig)NEWLINENEWLINE name = original.__name__NEWLINE if not name.isidentifier():NEWLINE name = 'funcopy'NEWLINE context = {'_checksig_': checksig, 'mock': mock}NEWLINE src = """def %s(*args, **kwargs):NEWLINE _checksig_(*args, **kwargs)NEWLINE return mock(*args, **kwargs)""" % nameNEWLINE exec (src, context)NEWLINE funcopy = context[name]NEWLINE _setup_func(funcopy, mock)NEWLINE return funcopyNEWLINENEWLINENEWLINEdef _setup_func(funcopy, mock):NEWLINE funcopy.mock = mockNEWLINENEWLINE # can't use isinstance with mocksNEWLINE if not _is_instance_mock(mock):NEWLINE returnNEWLINENEWLINE def assert_called_with(*args, **kwargs):NEWLINE return 
mock.assert_called_with(*args, **kwargs)NEWLINE def assert_called_once_with(*args, **kwargs):NEWLINE return mock.assert_called_once_with(*args, **kwargs)NEWLINE def assert_has_calls(*args, **kwargs):NEWLINE return mock.assert_has_calls(*args, **kwargs)NEWLINE def assert_any_call(*args, **kwargs):NEWLINE return mock.assert_any_call(*args, **kwargs)NEWLINE def reset_mock():NEWLINE funcopy.method_calls = _CallList()NEWLINE funcopy.mock_calls = _CallList()NEWLINE mock.reset_mock()NEWLINE ret = funcopy.return_valueNEWLINE if _is_instance_mock(ret) and not ret is mock:NEWLINE ret.reset_mock()NEWLINENEWLINE funcopy.called = FalseNEWLINE funcopy.call_count = 0NEWLINE funcopy.call_args = NoneNEWLINE funcopy.call_args_list = _CallList()NEWLINE funcopy.method_calls = _CallList()NEWLINE funcopy.mock_calls = _CallList()NEWLINENEWLINE funcopy.return_value = mock.return_valueNEWLINE funcopy.side_effect = mock.side_effectNEWLINE funcopy._mock_children = mock._mock_childrenNEWLINENEWLINE funcopy.assert_called_with = assert_called_withNEWLINE funcopy.assert_called_once_with = assert_called_once_withNEWLINE funcopy.assert_has_calls = assert_has_callsNEWLINE funcopy.assert_any_call = assert_any_callNEWLINE funcopy.reset_mock = reset_mockNEWLINENEWLINE mock._mock_delegate = funcopyNEWLINENEWLINENEWLINEdef _is_magic(name):NEWLINE return '__%s__' % name[2:-2] == nameNEWLINENEWLINENEWLINEclass _SentinelObject(object):NEWLINE "A unique, named, sentinel object."NEWLINE def __init__(self, name):NEWLINE self.name = nameNEWLINENEWLINE def __repr__(self):NEWLINE return 'sentinel.%s' % self.nameNEWLINENEWLINENEWLINEclass _Sentinel(object):NEWLINE """Access attributes to return a named object, usable as a sentinel."""NEWLINE def __init__(self):NEWLINE self._sentinels = {}NEWLINENEWLINE def __getattr__(self, name):NEWLINE if name == '__bases__':NEWLINE # Without this help(unittest.mock) raises an exceptionNEWLINE raise AttributeErrorNEWLINE return self._sentinels.setdefault(name, 
_SentinelObject(name))NEWLINENEWLINENEWLINEsentinel = _Sentinel()NEWLINENEWLINEDEFAULT = sentinel.DEFAULTNEWLINE_missing = sentinel.MISSINGNEWLINE_deleted = sentinel.DELETEDNEWLINENEWLINENEWLINEdef _copy(value):NEWLINE if type(value) in (dict, list, tuple, set):NEWLINE return type(value)(value)NEWLINE return valueNEWLINENEWLINENEWLINE_allowed_names = set(NEWLINE [NEWLINE 'return_value', '_mock_return_value', 'side_effect',NEWLINE '_mock_side_effect', '_mock_parent', '_mock_new_parent',NEWLINE '_mock_name', '_mock_new_name'NEWLINE ]NEWLINE)NEWLINENEWLINENEWLINEdef _delegating_property(name):NEWLINE _allowed_names.add(name)NEWLINE _the_name = '_mock_' + nameNEWLINE def _get(self, name=name, _the_name=_the_name):NEWLINE sig = self._mock_delegateNEWLINE if sig is None:NEWLINE return getattr(self, _the_name)NEWLINE return getattr(sig, name)NEWLINE def _set(self, value, name=name, _the_name=_the_name):NEWLINE sig = self._mock_delegateNEWLINE if sig is None:NEWLINE self.__dict__[_the_name] = valueNEWLINE else:NEWLINE setattr(sig, name, value)NEWLINENEWLINE return property(_get, _set)NEWLINENEWLINENEWLINENEWLINEclass _CallList(list):NEWLINENEWLINE def __contains__(self, value):NEWLINE if not isinstance(value, list):NEWLINE return list.__contains__(self, value)NEWLINE len_value = len(value)NEWLINE len_self = len(self)NEWLINE if len_value > len_self:NEWLINE return FalseNEWLINENEWLINE for i in range(0, len_self - len_value + 1):NEWLINE sub_list = self[i:i+len_value]NEWLINE if sub_list == value:NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINE def __repr__(self):NEWLINE return pprint.pformat(list(self))NEWLINENEWLINENEWLINEdef _check_and_set_parent(parent, value, name, new_name):NEWLINE if not _is_instance_mock(value):NEWLINE return FalseNEWLINE if ((value._mock_name or value._mock_new_name) orNEWLINE (value._mock_parent is not None) orNEWLINE (value._mock_new_parent is not None)):NEWLINE return FalseNEWLINENEWLINE _parent = parentNEWLINE while _parent is not None:NEWLINE 
# setting a mock (value) as a child or return value of itselfNEWLINE # should not modify the mockNEWLINE if _parent is value:NEWLINE return FalseNEWLINE _parent = _parent._mock_new_parentNEWLINENEWLINE if new_name:NEWLINE value._mock_new_parent = parentNEWLINE value._mock_new_name = new_nameNEWLINE if name:NEWLINE value._mock_parent = parentNEWLINE value._mock_name = nameNEWLINE return TrueNEWLINENEWLINE# Internal class to identify if we wrapped an iterator object or not.NEWLINEclass _MockIter(object):NEWLINE def __init__(self, obj):NEWLINE self.obj = iter(obj)NEWLINE def __iter__(self):NEWLINE return selfNEWLINE def __next__(self):NEWLINE return next(self.obj)NEWLINENEWLINEclass Base(object):NEWLINE _mock_return_value = DEFAULTNEWLINE _mock_side_effect = NoneNEWLINE def __init__(self, *args, **kwargs):NEWLINE passNEWLINENEWLINENEWLINENEWLINEclass NonCallableMock(Base):NEWLINE """A non-callable version of `Mock`"""NEWLINENEWLINE def __new__(cls, *args, **kw):NEWLINE # every instance has its own classNEWLINE # so we can create magic methods on theNEWLINE # class without stomping on other mocksNEWLINE new = type(cls.__name__, (cls,), {'__doc__': cls.__doc__})NEWLINE instance = object.__new__(new)NEWLINE return instanceNEWLINENEWLINENEWLINE def __init__(NEWLINE self, spec=None, wraps=None, name=None, spec_set=None,NEWLINE parent=None, _spec_state=None, _new_name='', _new_parent=None,NEWLINE _spec_as_instance=False, _eat_self=None, unsafe=False, **kwargsNEWLINE ):NEWLINE if _new_parent is None:NEWLINE _new_parent = parentNEWLINENEWLINE __dict__ = self.__dict__NEWLINE __dict__['_mock_parent'] = parentNEWLINE __dict__['_mock_name'] = nameNEWLINE __dict__['_mock_new_name'] = _new_nameNEWLINE __dict__['_mock_new_parent'] = _new_parentNEWLINENEWLINE if spec_set is not None:NEWLINE spec = spec_setNEWLINE spec_set = TrueNEWLINE if _eat_self is None:NEWLINE _eat_self = parent is not NoneNEWLINENEWLINE self._mock_add_spec(spec, spec_set, _spec_as_instance, 
_eat_self)NEWLINENEWLINE __dict__['_mock_children'] = {}NEWLINE __dict__['_mock_wraps'] = wrapsNEWLINE __dict__['_mock_delegate'] = NoneNEWLINENEWLINE __dict__['_mock_called'] = FalseNEWLINE __dict__['_mock_call_args'] = NoneNEWLINE __dict__['_mock_call_count'] = 0NEWLINE __dict__['_mock_call_args_list'] = _CallList()NEWLINE __dict__['_mock_mock_calls'] = _CallList()NEWLINENEWLINE __dict__['method_calls'] = _CallList()NEWLINE __dict__['_mock_unsafe'] = unsafeNEWLINENEWLINE if kwargs:NEWLINE self.configure_mock(**kwargs)NEWLINENEWLINE _safe_super(NonCallableMock, self).__init__(NEWLINE spec, wraps, name, spec_set, parent,NEWLINE _spec_stateNEWLINE )NEWLINENEWLINENEWLINE def attach_mock(self, mock, attribute):NEWLINE """NEWLINE Attach a mock as an attribute of this one, replacing its name andNEWLINE parent. Calls to the attached mock will be recorded in theNEWLINE `method_calls` and `mock_calls` attributes of this one."""NEWLINE mock._mock_parent = NoneNEWLINE mock._mock_new_parent = NoneNEWLINE mock._mock_name = ''NEWLINE mock._mock_new_name = NoneNEWLINENEWLINE setattr(self, attribute, mock)NEWLINENEWLINENEWLINE def mock_add_spec(self, spec, spec_set=False):NEWLINE """Add a spec to a mock. `spec` can either be an object or aNEWLINE list of strings. 
Only attributes on the `spec` can be fetched asNEWLINE attributes from the mock.NEWLINENEWLINE If `spec_set` is True then only attributes on the spec can be set."""NEWLINE self._mock_add_spec(spec, spec_set)NEWLINENEWLINENEWLINE def _mock_add_spec(self, spec, spec_set, _spec_as_instance=False,NEWLINE _eat_self=False):NEWLINE _spec_class = NoneNEWLINE _spec_signature = NoneNEWLINENEWLINE if spec is not None and not _is_list(spec):NEWLINE if isinstance(spec, type):NEWLINE _spec_class = specNEWLINE else:NEWLINE _spec_class = _get_class(spec)NEWLINE res = _get_signature_object(spec,NEWLINE _spec_as_instance, _eat_self)NEWLINE _spec_signature = res and res[1]NEWLINENEWLINE spec = dir(spec)NEWLINENEWLINE __dict__ = self.__dict__NEWLINE __dict__['_spec_class'] = _spec_classNEWLINE __dict__['_spec_set'] = spec_setNEWLINE __dict__['_spec_signature'] = _spec_signatureNEWLINE __dict__['_mock_methods'] = specNEWLINENEWLINENEWLINE def __get_return_value(self):NEWLINE ret = self._mock_return_valueNEWLINE if self._mock_delegate is not None:NEWLINE ret = self._mock_delegate.return_valueNEWLINENEWLINE if ret is DEFAULT:NEWLINE ret = self._get_child_mock(NEWLINE _new_parent=self, _new_name='()'NEWLINE )NEWLINE self.return_value = retNEWLINE return retNEWLINENEWLINENEWLINE def __set_return_value(self, value):NEWLINE if self._mock_delegate is not None:NEWLINE self._mock_delegate.return_value = valueNEWLINE else:NEWLINE self._mock_return_value = valueNEWLINE _check_and_set_parent(self, value, None, '()')NEWLINENEWLINE __return_value_doc = "The value to be returned when the mock is called."NEWLINE return_value = property(__get_return_value, __set_return_value,NEWLINE __return_value_doc)NEWLINENEWLINENEWLINE @propertyNEWLINE def __class__(self):NEWLINE if self._spec_class is None:NEWLINE return type(self)NEWLINE return self._spec_classNEWLINENEWLINE called = _delegating_property('called')NEWLINE call_count = _delegating_property('call_count')NEWLINE call_args = 
_delegating_property('call_args')NEWLINE call_args_list = _delegating_property('call_args_list')NEWLINE mock_calls = _delegating_property('mock_calls')NEWLINENEWLINENEWLINE def __get_side_effect(self):NEWLINE delegated = self._mock_delegateNEWLINE if delegated is None:NEWLINE return self._mock_side_effectNEWLINE sf = delegated.side_effectNEWLINE if sf is not None and not callable(sf) and not isinstance(sf, _MockIter):NEWLINE sf = _MockIter(sf)NEWLINE delegated.side_effect = sfNEWLINE return sfNEWLINENEWLINE def __set_side_effect(self, value):NEWLINE value = _try_iter(value)NEWLINE delegated = self._mock_delegateNEWLINE if delegated is None:NEWLINE self._mock_side_effect = valueNEWLINE else:NEWLINE delegated.side_effect = valueNEWLINENEWLINE side_effect = property(__get_side_effect, __set_side_effect)NEWLINENEWLINENEWLINE def reset_mock(self):NEWLINE "Restore the mock object to its initial state."NEWLINE self.called = FalseNEWLINE self.call_args = NoneNEWLINE self.call_count = 0NEWLINE self.mock_calls = _CallList()NEWLINE self.call_args_list = _CallList()NEWLINE self.method_calls = _CallList()NEWLINENEWLINE for child in self._mock_children.values():NEWLINE if isinstance(child, _SpecState):NEWLINE continueNEWLINE child.reset_mock()NEWLINENEWLINE ret = self._mock_return_valueNEWLINE if _is_instance_mock(ret) and ret is not self:NEWLINE ret.reset_mock()NEWLINENEWLINENEWLINE def configure_mock(self, **kwargs):NEWLINE """Set attributes on the mock through keyword arguments.NEWLINENEWLINE Attributes plus return values and side effects can be set on childNEWLINE mocks using standard dot notation and unpacking a dictionary in theNEWLINE method call:NEWLINENEWLINE >>> attrs = {'method.return_value': 3, 'other.side_effect': KeyError}NEWLINE >>> mock.configure_mock(**attrs)"""NEWLINE for arg, val in sorted(kwargs.items(),NEWLINE # we sort on the number of dots so thatNEWLINE # attributes are set before we set attributes onNEWLINE # attributesNEWLINE key=lambda entry: 
entry[0].count('.')):NEWLINE args = arg.split('.')NEWLINE final = args.pop()NEWLINE obj = selfNEWLINE for entry in args:NEWLINE obj = getattr(obj, entry)NEWLINE setattr(obj, final, val)NEWLINENEWLINENEWLINE def __getattr__(self, name):NEWLINE if name in {'_mock_methods', '_mock_unsafe'}:NEWLINE raise AttributeError(name)NEWLINE elif self._mock_methods is not None:NEWLINE if name not in self._mock_methods or name in _all_magics:NEWLINE raise AttributeError("Mock object has no attribute %r" % name)NEWLINE elif _is_magic(name):NEWLINE raise AttributeError(name)NEWLINE if not self._mock_unsafe:NEWLINE if name.startswith(('assert', 'assret')):NEWLINE raise AttributeError(name)NEWLINENEWLINE result = self._mock_children.get(name)NEWLINE if result is _deleted:NEWLINE raise AttributeError(name)NEWLINE elif result is None:NEWLINE wraps = NoneNEWLINE if self._mock_wraps is not None:NEWLINE # XXXX should we get the attribute without triggering codeNEWLINE # execution?NEWLINE wraps = getattr(self._mock_wraps, name)NEWLINENEWLINE result = self._get_child_mock(NEWLINE parent=self, name=name, wraps=wraps, _new_name=name,NEWLINE _new_parent=selfNEWLINE )NEWLINE self._mock_children[name] = resultNEWLINENEWLINE elif isinstance(result, _SpecState):NEWLINE result = create_autospec(NEWLINE result.spec, result.spec_set, result.instance,NEWLINE result.parent, result.nameNEWLINE )NEWLINE self._mock_children[name] = resultNEWLINENEWLINE return resultNEWLINENEWLINENEWLINE def __repr__(self):NEWLINE _name_list = [self._mock_new_name]NEWLINE _parent = self._mock_new_parentNEWLINE last = selfNEWLINENEWLINE dot = '.'NEWLINE if _name_list == ['()']:NEWLINE dot = ''NEWLINE seen = set()NEWLINE while _parent is not None:NEWLINE last = _parentNEWLINENEWLINE _name_list.append(_parent._mock_new_name + dot)NEWLINE dot = '.'NEWLINE if _parent._mock_new_name == '()':NEWLINE dot = ''NEWLINENEWLINE _parent = _parent._mock_new_parentNEWLINENEWLINE # use ids here so as not to call __hash__ on the 
mocksNEWLINE if id(_parent) in seen:NEWLINE breakNEWLINE seen.add(id(_parent))NEWLINENEWLINE _name_list = list(reversed(_name_list))NEWLINE _first = last._mock_name or 'mock'NEWLINE if len(_name_list) > 1:NEWLINE if _name_list[1] not in ('()', '().'):NEWLINE _first += '.'NEWLINE _name_list[0] = _firstNEWLINE name = ''.join(_name_list)NEWLINENEWLINE name_string = ''NEWLINE if name not in ('mock', 'mock.'):NEWLINE name_string = ' name=%r' % nameNEWLINENEWLINE spec_string = ''NEWLINE if self._spec_class is not None:NEWLINE spec_string = ' spec=%r'NEWLINE if self._spec_set:NEWLINE spec_string = ' spec_set=%r'NEWLINE spec_string = spec_string % self._spec_class.__name__NEWLINE return "<%s%s%s id='%s'>" % (NEWLINE type(self).__name__,NEWLINE name_string,NEWLINE spec_string,NEWLINE id(self)NEWLINE )NEWLINENEWLINENEWLINE def __dir__(self):NEWLINE """Filter the output of `dir(mock)` to only useful members."""NEWLINE if not FILTER_DIR:NEWLINE return object.__dir__(self)NEWLINENEWLINE extras = self._mock_methods or []NEWLINE from_type = dir(type(self))NEWLINE from_dict = list(self.__dict__)NEWLINENEWLINE from_type = [e for e in from_type if not e.startswith('_')]NEWLINE from_dict = [e for e in from_dict if not e.startswith('_') orNEWLINE _is_magic(e)]NEWLINE return sorted(set(extras + from_type + from_dict +NEWLINE list(self._mock_children)))NEWLINENEWLINENEWLINE def __setattr__(self, name, value):NEWLINE if name in _allowed_names:NEWLINE # property setters go through hereNEWLINE return object.__setattr__(self, name, value)NEWLINE elif (self._spec_set and self._mock_methods is not None andNEWLINE name not in self._mock_methods andNEWLINE name not in self.__dict__):NEWLINE raise AttributeError("Mock object has no attribute '%s'" % name)NEWLINE elif name in _unsupported_magics:NEWLINE msg = 'Attempting to set unsupported magic method %r.' 
% nameNEWLINE raise AttributeError(msg)NEWLINE elif name in _all_magics:NEWLINE if self._mock_methods is not None and name not in self._mock_methods:NEWLINE raise AttributeError("Mock object has no attribute '%s'" % name)NEWLINENEWLINE if not _is_instance_mock(value):NEWLINE setattr(type(self), name, _get_method(name, value))NEWLINE original = valueNEWLINE value = lambda *args, **kw: original(self, *args, **kw)NEWLINE else:NEWLINE # only set _new_name and not name so that mock_calls is trackedNEWLINE # but not method callsNEWLINE _check_and_set_parent(self, value, None, name)NEWLINE setattr(type(self), name, value)NEWLINE self._mock_children[name] = valueNEWLINE elif name == '__class__':NEWLINE self._spec_class = valueNEWLINE returnNEWLINE else:NEWLINE if _check_and_set_parent(self, value, name, name):NEWLINE self._mock_children[name] = valueNEWLINE return object.__setattr__(self, name, value)NEWLINENEWLINENEWLINE def __delattr__(self, name):NEWLINE if name in _all_magics and name in type(self).__dict__:NEWLINE delattr(type(self), name)NEWLINE if name not in self.__dict__:NEWLINE # for magic methods that are still MagicProxy objects andNEWLINE # not set on the instance itselfNEWLINE returnNEWLINENEWLINE if name in self.__dict__:NEWLINE object.__delattr__(self, name)NEWLINENEWLINE obj = self._mock_children.get(name, _missing)NEWLINE if obj is _deleted:NEWLINE raise AttributeError(name)NEWLINE if obj is not _missing:NEWLINE del self._mock_children[name]NEWLINE self._mock_children[name] = _deletedNEWLINENEWLINENEWLINE def _format_mock_call_signature(self, args, kwargs):NEWLINE name = self._mock_name or 'mock'NEWLINE return _format_call_signature(name, args, kwargs)NEWLINENEWLINENEWLINE def _format_mock_failure_message(self, args, kwargs):NEWLINE message = 'Expected call: %s\nActual call: %s'NEWLINE expected_string = self._format_mock_call_signature(args, kwargs)NEWLINE call_args = self.call_argsNEWLINE if len(call_args) == 3:NEWLINE call_args = call_args[1:]NEWLINE 
actual_string = self._format_mock_call_signature(*call_args)NEWLINE return message % (expected_string, actual_string)NEWLINENEWLINENEWLINE def _call_matcher(self, _call):NEWLINE """NEWLINE Given a call (or simply a (args, kwargs) tuple), return aNEWLINE comparison key suitable for matching with other calls.NEWLINE This is a best effort method which relies on the spec's signature,NEWLINE if available, or falls back on the arguments themselves.NEWLINE """NEWLINE sig = self._spec_signatureNEWLINE if sig is not None:NEWLINE if len(_call) == 2:NEWLINE name = ''NEWLINE args, kwargs = _callNEWLINE else:NEWLINE name, args, kwargs = _callNEWLINE try:NEWLINE return name, sig.bind(*args, **kwargs)NEWLINE except TypeError as e:NEWLINE return e.with_traceback(None)NEWLINE else:NEWLINE return _callNEWLINENEWLINE def assert_not_called(_mock_self):NEWLINE """assert that the mock was never called.NEWLINE """NEWLINE self = _mock_selfNEWLINE if self.call_count != 0:NEWLINE msg = ("Expected '%s' to not have been called. Called %s times." 
%NEWLINE (self._mock_name or 'mock', self.call_count))NEWLINE raise AssertionError(msg)NEWLINENEWLINE def assert_called_with(_mock_self, *args, **kwargs):NEWLINE """assert that the mock was called with the specified arguments.NEWLINENEWLINE Raises an AssertionError if the args and keyword args passed in areNEWLINE different to the last call to the mock."""NEWLINE self = _mock_selfNEWLINE if self.call_args is None:NEWLINE expected = self._format_mock_call_signature(args, kwargs)NEWLINE raise AssertionError('Expected call: %s\nNot called' % (expected,))NEWLINENEWLINE def _error_message():NEWLINE msg = self._format_mock_failure_message(args, kwargs)NEWLINE return msgNEWLINE expected = self._call_matcher((args, kwargs))NEWLINE actual = self._call_matcher(self.call_args)NEWLINE if expected != actual:NEWLINE cause = expected if isinstance(expected, Exception) else NoneNEWLINE raise AssertionError(_error_message()) from causeNEWLINENEWLINENEWLINE def assert_called_once_with(_mock_self, *args, **kwargs):NEWLINE """assert that the mock was called exactly once and with the specifiedNEWLINE arguments."""NEWLINE self = _mock_selfNEWLINE if not self.call_count == 1:NEWLINE msg = ("Expected '%s' to be called once. Called %s times." %NEWLINE (self._mock_name or 'mock', self.call_count))NEWLINE raise AssertionError(msg)NEWLINE return self.assert_called_with(*args, **kwargs)NEWLINENEWLINENEWLINE def assert_has_calls(self, calls, any_order=False):NEWLINE """assert the mock has been called with the specified calls.NEWLINE The `mock_calls` list is checked for the calls.NEWLINENEWLINE If `any_order` is False (the default) then the calls must beNEWLINE sequential. 
There can be extra calls before or after theNEWLINE specified calls.NEWLINENEWLINE If `any_order` is True then the calls can be in any order, butNEWLINE they must all appear in `mock_calls`."""NEWLINE expected = [self._call_matcher(c) for c in calls]NEWLINE cause = expected if isinstance(expected, Exception) else NoneNEWLINE all_calls = _CallList(self._call_matcher(c) for c in self.mock_calls)NEWLINE if not any_order:NEWLINE if expected not in all_calls:NEWLINE raise AssertionError(NEWLINE 'Calls not found.\nExpected: %r\n'NEWLINE 'Actual: %r' % (calls, self.mock_calls)NEWLINE ) from causeNEWLINE returnNEWLINENEWLINE all_calls = list(all_calls)NEWLINENEWLINE not_found = []NEWLINE for kall in expected:NEWLINE try:NEWLINE all_calls.remove(kall)NEWLINE except ValueError:NEWLINE not_found.append(kall)NEWLINE if not_found:NEWLINE raise AssertionError(NEWLINE '%r not all found in call list' % (tuple(not_found),)NEWLINE ) from causeNEWLINENEWLINENEWLINE def assert_any_call(self, *args, **kwargs):NEWLINE """assert the mock has been called with the specified arguments.NEWLINENEWLINE The assert passes if the mock has *ever* been called, unlikeNEWLINE `assert_called_with` and `assert_called_once_with` that only pass ifNEWLINE the call is the most recent one."""NEWLINE expected = self._call_matcher((args, kwargs))NEWLINE actual = [self._call_matcher(c) for c in self.call_args_list]NEWLINE if expected not in actual:NEWLINE cause = expected if isinstance(expected, Exception) else NoneNEWLINE expected_string = self._format_mock_call_signature(args, kwargs)NEWLINE raise AssertionError(NEWLINE '%s call not found' % expected_stringNEWLINE ) from causeNEWLINENEWLINENEWLINE def _get_child_mock(self, **kw):NEWLINE """Create the child mocks for attributes and return value.NEWLINE By default child mocks will be the same type as the parent.NEWLINE Subclasses of Mock may want to override this to customize the wayNEWLINE child mocks are made.NEWLINENEWLINE For non-callable mocks the 
callable variant will be used (rather thanNEWLINE any custom subclass)."""NEWLINE _type = type(self)NEWLINE if not issubclass(_type, CallableMixin):NEWLINE if issubclass(_type, NonCallableMagicMock):NEWLINE klass = MagicMockNEWLINE elif issubclass(_type, NonCallableMock) :NEWLINE klass = MockNEWLINE else:NEWLINE klass = _type.__mro__[1]NEWLINE return klass(**kw)NEWLINENEWLINENEWLINENEWLINEdef _try_iter(obj):NEWLINE if obj is None:NEWLINE return objNEWLINE if _is_exception(obj):NEWLINE return objNEWLINE if _callable(obj):NEWLINE return objNEWLINE try:NEWLINE return iter(obj)NEWLINE except TypeError:NEWLINE # XXXX backwards compatibilityNEWLINE # but this will blow up on first call - so maybe we should fail early?NEWLINE return objNEWLINENEWLINENEWLINENEWLINEclass CallableMixin(Base):NEWLINENEWLINE def __init__(self, spec=None, side_effect=None, return_value=DEFAULT,NEWLINE wraps=None, name=None, spec_set=None, parent=None,NEWLINE _spec_state=None, _new_name='', _new_parent=None, **kwargs):NEWLINE self.__dict__['_mock_return_value'] = return_valueNEWLINENEWLINE _safe_super(CallableMixin, self).__init__(NEWLINE spec, wraps, name, spec_set, parent,NEWLINE _spec_state, _new_name, _new_parent, **kwargsNEWLINE )NEWLINENEWLINE self.side_effect = side_effectNEWLINENEWLINENEWLINE def _mock_check_sig(self, *args, **kwargs):NEWLINE # stub method that can be replaced with one with a specific signatureNEWLINE passNEWLINENEWLINENEWLINE def __call__(_mock_self, *args, **kwargs):NEWLINE # can't use self in-case a function / method we are mocking uses selfNEWLINE # in the signatureNEWLINE _mock_self._mock_check_sig(*args, **kwargs)NEWLINE return _mock_self._mock_call(*args, **kwargs)NEWLINENEWLINENEWLINE def _mock_call(_mock_self, *args, **kwargs):NEWLINE self = _mock_selfNEWLINE self.called = TrueNEWLINE self.call_count += 1NEWLINE _new_name = self._mock_new_nameNEWLINE _new_parent = self._mock_new_parentNEWLINENEWLINE _call = _Call((args, kwargs), two=True)NEWLINE self.call_args = 
_callNEWLINE self.call_args_list.append(_call)NEWLINE self.mock_calls.append(_Call(('', args, kwargs)))NEWLINENEWLINE seen = set()NEWLINE skip_next_dot = _new_name == '()'NEWLINE do_method_calls = self._mock_parent is not NoneNEWLINE name = self._mock_nameNEWLINE while _new_parent is not None:NEWLINE this_mock_call = _Call((_new_name, args, kwargs))NEWLINE if _new_parent._mock_new_name:NEWLINE dot = '.'NEWLINE if skip_next_dot:NEWLINE dot = ''NEWLINENEWLINE skip_next_dot = FalseNEWLINE if _new_parent._mock_new_name == '()':NEWLINE skip_next_dot = TrueNEWLINENEWLINE _new_name = _new_parent._mock_new_name + dot + _new_nameNEWLINENEWLINE if do_method_calls:NEWLINE if _new_name == name:NEWLINE this_method_call = this_mock_callNEWLINE else:NEWLINE this_method_call = _Call((name, args, kwargs))NEWLINE _new_parent.method_calls.append(this_method_call)NEWLINENEWLINE do_method_calls = _new_parent._mock_parent is not NoneNEWLINE if do_method_calls:NEWLINE name = _new_parent._mock_name + '.' + nameNEWLINENEWLINE _new_parent.mock_calls.append(this_mock_call)NEWLINE _new_parent = _new_parent._mock_new_parentNEWLINENEWLINE # use ids here so as not to call __hash__ on the mocksNEWLINE _new_parent_id = id(_new_parent)NEWLINE if _new_parent_id in seen:NEWLINE breakNEWLINE seen.add(_new_parent_id)NEWLINENEWLINE ret_val = DEFAULTNEWLINE effect = self.side_effectNEWLINE if effect is not None:NEWLINE if _is_exception(effect):NEWLINE raise effectNEWLINENEWLINE if not _callable(effect):NEWLINE result = next(effect)NEWLINE if _is_exception(result):NEWLINE raise resultNEWLINE if result is DEFAULT:NEWLINE result = self.return_valueNEWLINE return resultNEWLINENEWLINE ret_val = effect(*args, **kwargs)NEWLINENEWLINE if (self._mock_wraps is not None andNEWLINE self._mock_return_value is DEFAULT):NEWLINE return self._mock_wraps(*args, **kwargs)NEWLINE if ret_val is DEFAULT:NEWLINE ret_val = self.return_valueNEWLINE return ret_valNEWLINENEWLINENEWLINENEWLINEclass Mock(CallableMixin, 
NonCallableMock):NEWLINE """NEWLINE Create a new `Mock` object. `Mock` takes several optional argumentsNEWLINE that specify the behaviour of the Mock object:NEWLINENEWLINE * `spec`: This can be either a list of strings or an existing object (aNEWLINE class or instance) that acts as the specification for the mock object. IfNEWLINE you pass in an object then a list of strings is formed by calling dir onNEWLINE the object (excluding unsupported magic attributes and methods). AccessingNEWLINE any attribute not in this list will raise an `AttributeError`.NEWLINENEWLINE If `spec` is an object (rather than a list of strings) thenNEWLINE `mock.__class__` returns the class of the spec object. This allows mocksNEWLINE to pass `isinstance` tests.NEWLINENEWLINE * `spec_set`: A stricter variant of `spec`. If used, attempting to *set*NEWLINE or get an attribute on the mock that isn't on the object passed asNEWLINE `spec_set` will raise an `AttributeError`.NEWLINENEWLINE * `side_effect`: A function to be called whenever the Mock is called. SeeNEWLINE the `side_effect` attribute. Useful for raising exceptions orNEWLINE dynamically changing return values. The function is called with the sameNEWLINE arguments as the mock, and unless it returns `DEFAULT`, the returnNEWLINE value of this function is used as the return value.NEWLINENEWLINE If `side_effect` is an iterable then each call to the mock will returnNEWLINE the next value from the iterable. If any of the members of the iterableNEWLINE are exceptions they will be raised instead of returned.NEWLINENEWLINE * `return_value`: The value returned when the mock is called. By defaultNEWLINE this is a new Mock (created on first access). See theNEWLINE `return_value` attribute.NEWLINENEWLINE * `wraps`: Item for the mock object to wrap. If `wraps` is not None thenNEWLINE calling the Mock will pass the call through to the wrapped objectNEWLINE (returning the real result). 
Attribute access on the mock will return aNEWLINE Mock object that wraps the corresponding attribute of the wrapped objectNEWLINE (so attempting to access an attribute that doesn't exist will raise anNEWLINE `AttributeError`).NEWLINENEWLINE If the mock has an explicit `return_value` set then calls are not passedNEWLINE to the wrapped object and the `return_value` is returned instead.NEWLINENEWLINE * `name`: If the mock has a name then it will be used in the repr of theNEWLINE mock. This can be useful for debugging. The name is propagated to childNEWLINE mocks.NEWLINENEWLINE Mocks can also be called with arbitrary keyword arguments. These will beNEWLINE used to set attributes on the mock after it is created.NEWLINE """NEWLINENEWLINENEWLINENEWLINEdef _dot_lookup(thing, comp, import_path):NEWLINE try:NEWLINE return getattr(thing, comp)NEWLINE except AttributeError:NEWLINE __import__(import_path)NEWLINE return getattr(thing, comp)NEWLINENEWLINENEWLINEdef _importer(target):NEWLINE components = target.split('.')NEWLINE import_path = components.pop(0)NEWLINE thing = __import__(import_path)NEWLINENEWLINE for comp in components:NEWLINE import_path += ".%s" % compNEWLINE thing = _dot_lookup(thing, comp, import_path)NEWLINE return thingNEWLINENEWLINENEWLINEdef _is_started(patcher):NEWLINE # XXXX horribleNEWLINE return hasattr(patcher, 'is_local')NEWLINENEWLINENEWLINEclass _patch(object):NEWLINENEWLINE attribute_name = NoneNEWLINE _active_patches = []NEWLINENEWLINE def __init__(NEWLINE self, getter, attribute, new, spec, create,NEWLINE spec_set, autospec, new_callable, kwargsNEWLINE ):NEWLINE if new_callable is not None:NEWLINE if new is not DEFAULT:NEWLINE raise ValueError(NEWLINE "Cannot use 'new' and 'new_callable' together"NEWLINE )NEWLINE if autospec is not None:NEWLINE raise ValueError(NEWLINE "Cannot use 'autospec' and 'new_callable' together"NEWLINE )NEWLINENEWLINE self.getter = getterNEWLINE self.attribute = attributeNEWLINE self.new = newNEWLINE self.new_callable = 
new_callableNEWLINE self.spec = specNEWLINE self.create = createNEWLINE self.has_local = FalseNEWLINE self.spec_set = spec_setNEWLINE self.autospec = autospecNEWLINE self.kwargs = kwargsNEWLINE self.additional_patchers = []NEWLINENEWLINENEWLINE def copy(self):NEWLINE patcher = _patch(NEWLINE self.getter, self.attribute, self.new, self.spec,NEWLINE self.create, self.spec_set,NEWLINE self.autospec, self.new_callable, self.kwargsNEWLINE )NEWLINE patcher.attribute_name = self.attribute_nameNEWLINE patcher.additional_patchers = [NEWLINE p.copy() for p in self.additional_patchersNEWLINE ]NEWLINE return patcherNEWLINENEWLINENEWLINE def __call__(self, func):NEWLINE if isinstance(func, type):NEWLINE return self.decorate_class(func)NEWLINE return self.decorate_callable(func)NEWLINENEWLINENEWLINE def decorate_class(self, klass):NEWLINE for attr in dir(klass):NEWLINE if not attr.startswith(patch.TEST_PREFIX):NEWLINE continueNEWLINENEWLINE attr_value = getattr(klass, attr)NEWLINE if not hasattr(attr_value, "__call__"):NEWLINE continueNEWLINENEWLINE patcher = self.copy()NEWLINE setattr(klass, attr, patcher(attr_value))NEWLINE return klassNEWLINENEWLINENEWLINE def decorate_callable(self, func):NEWLINE if hasattr(func, 'patchings'):NEWLINE func.patchings.append(self)NEWLINE return funcNEWLINENEWLINE @wraps(func)NEWLINE def patched(*args, **keywargs):NEWLINE extra_args = []NEWLINE entered_patchers = []NEWLINENEWLINE exc_info = tuple()NEWLINE try:NEWLINE for patching in patched.patchings:NEWLINE arg = patching.__enter__()NEWLINE entered_patchers.append(patching)NEWLINE if patching.attribute_name is not None:NEWLINE keywargs.update(arg)NEWLINE elif patching.new is DEFAULT:NEWLINE extra_args.append(arg)NEWLINENEWLINE args += tuple(extra_args)NEWLINE return func(*args, **keywargs)NEWLINE except:NEWLINE if (patching not in entered_patchers andNEWLINE _is_started(patching)):NEWLINE # the patcher may have been started, but an exceptionNEWLINE # raised whilst entering one of its 
additional_patchersNEWLINE entered_patchers.append(patching)NEWLINE # Pass the exception to __exit__NEWLINE exc_info = sys.exc_info()NEWLINE # re-raise the exceptionNEWLINE raiseNEWLINE finally:NEWLINE for patching in reversed(entered_patchers):NEWLINE patching.__exit__(*exc_info)NEWLINENEWLINE patched.patchings = [self]NEWLINE return patchedNEWLINENEWLINENEWLINE def get_original(self):NEWLINE target = self.getter()NEWLINE name = self.attributeNEWLINENEWLINE original = DEFAULTNEWLINE local = FalseNEWLINENEWLINE try:NEWLINE original = target.__dict__[name]NEWLINE except (AttributeError, KeyError):NEWLINE original = getattr(target, name, DEFAULT)NEWLINE else:NEWLINE local = TrueNEWLINENEWLINE if name in _builtins and isinstance(target, ModuleType):NEWLINE self.create = TrueNEWLINENEWLINE if not self.create and original is DEFAULT:NEWLINE raise AttributeError(NEWLINE "%s does not have the attribute %r" % (target, name)NEWLINE )NEWLINE return original, localNEWLINENEWLINENEWLINE def __enter__(self):NEWLINE """Perform the patch."""NEWLINE new, spec, spec_set = self.new, self.spec, self.spec_setNEWLINE autospec, kwargs = self.autospec, self.kwargsNEWLINE new_callable = self.new_callableNEWLINE self.target = self.getter()NEWLINENEWLINE # normalise False to NoneNEWLINE if spec is False:NEWLINE spec = NoneNEWLINE if spec_set is False:NEWLINE spec_set = NoneNEWLINE if autospec is False:NEWLINE autospec = NoneNEWLINENEWLINE if spec is not None and autospec is not None:NEWLINE raise TypeError("Can't specify spec and autospec")NEWLINE if ((spec is not None or autospec is not None) andNEWLINE spec_set not in (True, None)):NEWLINE raise TypeError("Can't provide explicit spec_set *and* spec or autospec")NEWLINENEWLINE original, local = self.get_original()NEWLINENEWLINE if new is DEFAULT and autospec is None:NEWLINE inherit = FalseNEWLINE if spec is True:NEWLINE # set spec to the object we are replacingNEWLINE spec = originalNEWLINE if spec_set is True:NEWLINE spec_set = 
originalNEWLINE spec = NoneNEWLINE elif spec is not None:NEWLINE if spec_set is True:NEWLINE spec_set = specNEWLINE spec = NoneNEWLINE elif spec_set is True:NEWLINE spec_set = originalNEWLINENEWLINE if spec is not None or spec_set is not None:NEWLINE if original is DEFAULT:NEWLINE raise TypeError("Can't use 'spec' with create=True")NEWLINE if isinstance(original, type):NEWLINE # If we're patching out a class and there is a specNEWLINE inherit = TrueNEWLINENEWLINE Klass = MagicMockNEWLINE _kwargs = {}NEWLINE if new_callable is not None:NEWLINE Klass = new_callableNEWLINE elif spec is not None or spec_set is not None:NEWLINE this_spec = specNEWLINE if spec_set is not None:NEWLINE this_spec = spec_setNEWLINE if _is_list(this_spec):NEWLINE not_callable = '__call__' not in this_specNEWLINE else:NEWLINE not_callable = not callable(this_spec)NEWLINE if not_callable:NEWLINE Klass = NonCallableMagicMockNEWLINENEWLINE if spec is not None:NEWLINE _kwargs['spec'] = specNEWLINE if spec_set is not None:NEWLINE _kwargs['spec_set'] = spec_setNEWLINENEWLINE # add a name to mocksNEWLINE if (isinstance(Klass, type) andNEWLINE issubclass(Klass, NonCallableMock) and self.attribute):NEWLINE _kwargs['name'] = self.attributeNEWLINENEWLINE _kwargs.update(kwargs)NEWLINE new = Klass(**_kwargs)NEWLINENEWLINE if inherit and _is_instance_mock(new):NEWLINE # we can only tell if the instance should be callable if theNEWLINE # spec is not a listNEWLINE this_spec = specNEWLINE if spec_set is not None:NEWLINE this_spec = spec_setNEWLINE if (not _is_list(this_spec) and notNEWLINE _instance_callable(this_spec)):NEWLINE Klass = NonCallableMagicMockNEWLINENEWLINE _kwargs.pop('name')NEWLINE new.return_value = Klass(_new_parent=new, _new_name='()',NEWLINE **_kwargs)NEWLINE elif autospec is not None:NEWLINE # spec is ignored, new *must* be default, spec_set is treatedNEWLINE # as a boolean. 
Should we check spec is not None and that spec_setNEWLINE # is a bool?NEWLINE if new is not DEFAULT:NEWLINE raise TypeError(NEWLINE "autospec creates the mock for you. Can't specify "NEWLINE "autospec and new."NEWLINE )NEWLINE if original is DEFAULT:NEWLINE raise TypeError("Can't use 'autospec' with create=True")NEWLINE spec_set = bool(spec_set)NEWLINE if autospec is True:NEWLINE autospec = originalNEWLINENEWLINE new = create_autospec(autospec, spec_set=spec_set,NEWLINE _name=self.attribute, **kwargs)NEWLINE elif kwargs:NEWLINE # can't set keyword args when we aren't creating the mockNEWLINE # XXXX If new is a Mock we could call new.configure_mock(**kwargs)NEWLINE raise TypeError("Can't pass kwargs to a mock we aren't creating")NEWLINENEWLINE new_attr = newNEWLINENEWLINE self.temp_original = originalNEWLINE self.is_local = localNEWLINE setattr(self.target, self.attribute, new_attr)NEWLINE if self.attribute_name is not None:NEWLINE extra_args = {}NEWLINE if self.new is DEFAULT:NEWLINE extra_args[self.attribute_name] = newNEWLINE for patching in self.additional_patchers:NEWLINE arg = patching.__enter__()NEWLINE if patching.new is DEFAULT:NEWLINE extra_args.update(arg)NEWLINE return extra_argsNEWLINENEWLINE return newNEWLINENEWLINENEWLINE def __exit__(self, *exc_info):NEWLINE """Undo the patch."""NEWLINE if not _is_started(self):NEWLINE raise RuntimeError('stop called on unstarted patcher')NEWLINENEWLINE if self.is_local and self.temp_original is not DEFAULT:NEWLINE setattr(self.target, self.attribute, self.temp_original)NEWLINE else:NEWLINE delattr(self.target, self.attribute)NEWLINE if not self.create and not hasattr(self.target, self.attribute):NEWLINE # needed for proxy objects like django settingsNEWLINE setattr(self.target, self.attribute, self.temp_original)NEWLINENEWLINE del self.temp_originalNEWLINE del self.is_localNEWLINE del self.targetNEWLINE for patcher in reversed(self.additional_patchers):NEWLINE if _is_started(patcher):NEWLINE 
patcher.__exit__(*exc_info)NEWLINENEWLINENEWLINE def start(self):NEWLINE """Activate a patch, returning any created mock."""NEWLINE result = self.__enter__()NEWLINE self._active_patches.append(self)NEWLINE return resultNEWLINENEWLINENEWLINE def stop(self):NEWLINE """Stop an active patch."""NEWLINE try:NEWLINE self._active_patches.remove(self)NEWLINE except ValueError:NEWLINE # If the patch hasn't been started this will failNEWLINE passNEWLINENEWLINE return self.__exit__()NEWLINENEWLINENEWLINENEWLINEdef _get_target(target):NEWLINE try:NEWLINE target, attribute = target.rsplit('.', 1)NEWLINE except (TypeError, ValueError):NEWLINE raise TypeError("Need a valid target to patch. You supplied: %r" %NEWLINE (target,))NEWLINE getter = lambda: _importer(target)NEWLINE return getter, attributeNEWLINENEWLINENEWLINEdef _patch_object(NEWLINE target, attribute, new=DEFAULT, spec=None,NEWLINE create=False, spec_set=None, autospec=None,NEWLINE new_callable=None, **kwargsNEWLINE ):NEWLINE """NEWLINE patch the named member (`attribute`) on an object (`target`) with a mockNEWLINE object.NEWLINENEWLINE `patch.object` can be used as a decorator, class decorator or a contextNEWLINE manager. Arguments `new`, `spec`, `create`, `spec_set`,NEWLINE `autospec` and `new_callable` have the same meaning as for `patch`. LikeNEWLINE `patch`, `patch.object` takes arbitrary keyword arguments for configuringNEWLINE the mock object it creates.NEWLINENEWLINE When used as a class decorator `patch.object` honours `patch.TEST_PREFIX`NEWLINE for choosing which methods to wrap.NEWLINE """NEWLINE getter = lambda: targetNEWLINE return _patch(NEWLINE getter, attribute, new, spec, create,NEWLINE spec_set, autospec, new_callable, kwargsNEWLINE )NEWLINENEWLINENEWLINEdef _patch_multiple(target, spec=None, create=False, spec_set=None,NEWLINE autospec=None, new_callable=None, **kwargs):NEWLINE """Perform multiple patches in a single call. 
It takes the object to beNEWLINE patched (either as an object or a string to fetch the object by importing)NEWLINE and keyword arguments for the patches::NEWLINENEWLINE with patch.multiple(settings, FIRST_PATCH='one', SECOND_PATCH='two'):NEWLINE ...NEWLINENEWLINE Use `DEFAULT` as the value if you want `patch.multiple` to createNEWLINE mocks for you. In this case the created mocks are passed into a decoratedNEWLINE function by keyword, and a dictionary is returned when `patch.multiple` isNEWLINE used as a context manager.NEWLINENEWLINE `patch.multiple` can be used as a decorator, class decorator or a contextNEWLINE manager. The arguments `spec`, `spec_set`, `create`,NEWLINE `autospec` and `new_callable` have the same meaning as for `patch`. TheseNEWLINE arguments will be applied to *all* patches done by `patch.multiple`.NEWLINENEWLINE When used as a class decorator `patch.multiple` honours `patch.TEST_PREFIX`NEWLINE for choosing which methods to wrap.NEWLINE """NEWLINE if type(target) is str:NEWLINE getter = lambda: _importer(target)NEWLINE else:NEWLINE getter = lambda: targetNEWLINENEWLINE if not kwargs:NEWLINE raise ValueError(NEWLINE 'Must supply at least one keyword argument with patch.multiple'NEWLINE )NEWLINE # need to wrap in a list for python 3, where items is a viewNEWLINE items = list(kwargs.items())NEWLINE attribute, new = items[0]NEWLINE patcher = _patch(NEWLINE getter, attribute, new, spec, create, spec_set,NEWLINE autospec, new_callable, {}NEWLINE )NEWLINE patcher.attribute_name = attributeNEWLINE for attribute, new in items[1:]:NEWLINE this_patcher = _patch(NEWLINE getter, attribute, new, spec, create, spec_set,NEWLINE autospec, new_callable, {}NEWLINE )NEWLINE this_patcher.attribute_name = attributeNEWLINE patcher.additional_patchers.append(this_patcher)NEWLINE return patcherNEWLINENEWLINENEWLINEdef patch(NEWLINE target, new=DEFAULT, spec=None, create=False,NEWLINE spec_set=None, autospec=None, new_callable=None, **kwargsNEWLINE ):NEWLINE """NEWLINE 
`patch` acts as a function decorator, class decorator or a contextNEWLINE manager. Inside the body of the function or with statement, the `target`NEWLINE is patched with a `new` object. When the function/with statement exitsNEWLINE the patch is undone.NEWLINENEWLINE If `new` is omitted, then the target is replaced with aNEWLINE `MagicMock`. If `patch` is used as a decorator and `new` isNEWLINE omitted, the created mock is passed in as an extra argument to theNEWLINE decorated function. If `patch` is used as a context manager the createdNEWLINE mock is returned by the context manager.NEWLINENEWLINE `target` should be a string in the form `'package.module.ClassName'`. TheNEWLINE `target` is imported and the specified object replaced with the `new`NEWLINE object, so the `target` must be importable from the environment you areNEWLINE calling `patch` from. The target is imported when the decorated functionNEWLINE is executed, not at decoration time.NEWLINENEWLINE The `spec` and `spec_set` keyword arguments are passed to the `MagicMock`NEWLINE if patch is creating one for you.NEWLINENEWLINE In addition you can pass `spec=True` or `spec_set=True`, which causesNEWLINE patch to pass in the object being mocked as the spec/spec_set object.NEWLINENEWLINE `new_callable` allows you to specify a different class, or callable object,NEWLINE that will be called to create the `new` object. By default `MagicMock` isNEWLINE used.NEWLINENEWLINE A more powerful form of `spec` is `autospec`. If you set `autospec=True`NEWLINE then the mock with be created with a spec from the object being replaced.NEWLINE All attributes of the mock will also have the spec of the correspondingNEWLINE attribute of the object being replaced. Methods and functions beingNEWLINE mocked will have their arguments checked and will raise a `TypeError` ifNEWLINE they are called with the wrong signature. 
For mocks replacing a class,NEWLINE their return value (the 'instance') will have the same spec as the class.NEWLINENEWLINE Instead of `autospec=True` you can pass `autospec=some_object` to use anNEWLINE arbitrary object as the spec instead of the one being replaced.NEWLINENEWLINE By default `patch` will fail to replace attributes that don't exist. IfNEWLINE you pass in `create=True`, and the attribute doesn't exist, patch willNEWLINE create the attribute for you when the patched function is called, andNEWLINE delete it again afterwards. This is useful for writing tests againstNEWLINE attributes that your production code creates at runtime. It is off byNEWLINE default because it can be dangerous. With it switched on you can writeNEWLINE passing tests against APIs that don't actually exist!NEWLINENEWLINE Patch can be used as a `TestCase` class decorator. It works byNEWLINE decorating each test method in the class. This reduces the boilerplateNEWLINE code when your test methods share a common patchings set. `patch` findsNEWLINE tests by looking for method names that start with `patch.TEST_PREFIX`.NEWLINE By default this is `test`, which matches the way `unittest` finds tests.NEWLINE You can specify an alternative prefix by setting `patch.TEST_PREFIX`.NEWLINENEWLINE Patch can be used as a context manager, with the with statement. Here theNEWLINE patching applies to the indented block after the with statement. If youNEWLINE use "as" then the patched object will be bound to the name after theNEWLINE "as"; very useful if `patch` is creating a mock object for you.NEWLINENEWLINE `patch` takes arbitrary keyword arguments. 
These will be passed toNEWLINE the `Mock` (or `new_callable`) on construction.NEWLINENEWLINE `patch.dict(...)`, `patch.multiple(...)` and `patch.object(...)` areNEWLINE available for alternate use-cases.NEWLINE """NEWLINE getter, attribute = _get_target(target)NEWLINE return _patch(NEWLINE getter, attribute, new, spec, create,NEWLINE spec_set, autospec, new_callable, kwargsNEWLINE )NEWLINENEWLINENEWLINEclass _patch_dict(object):NEWLINE """NEWLINE Patch a dictionary, or dictionary like object, and restore the dictionaryNEWLINE to its original state after the test.NEWLINENEWLINE `in_dict` can be a dictionary or a mapping like container. If it is aNEWLINE mapping then it must at least support getting, setting and deleting itemsNEWLINE plus iterating over keys.NEWLINENEWLINE `in_dict` can also be a string specifying the name of the dictionary, whichNEWLINE will then be fetched by importing it.NEWLINENEWLINE `values` can be a dictionary of values to set in the dictionary. `values`NEWLINE can also be an iterable of `(key, value)` pairs.NEWLINENEWLINE If `clear` is True then the dictionary will be cleared before the newNEWLINE values are set.NEWLINENEWLINE `patch.dict` can also be called with arbitrary keyword arguments to setNEWLINE values in the dictionary::NEWLINENEWLINE with patch.dict('sys.modules', mymodule=Mock(), other_module=Mock()):NEWLINE ...NEWLINENEWLINE `patch.dict` can be used as a context manager, decorator or classNEWLINE decorator. When used as a class decorator `patch.dict` honoursNEWLINE `patch.TEST_PREFIX` for choosing which methods to wrap.NEWLINE """NEWLINENEWLINE def __init__(self, in_dict, values=(), clear=False, **kwargs):NEWLINE if isinstance(in_dict, str):NEWLINE in_dict = _importer(in_dict)NEWLINE self.in_dict = in_dictNEWLINE # support any argument supported by dict(...) 
constructorNEWLINE self.values = dict(values)NEWLINE self.values.update(kwargs)NEWLINE self.clear = clearNEWLINE self._original = NoneNEWLINENEWLINENEWLINE def __call__(self, f):NEWLINE if isinstance(f, type):NEWLINE return self.decorate_class(f)NEWLINE @wraps(f)NEWLINE def _inner(*args, **kw):NEWLINE self._patch_dict()NEWLINE try:NEWLINE return f(*args, **kw)NEWLINE finally:NEWLINE self._unpatch_dict()NEWLINENEWLINE return _innerNEWLINENEWLINENEWLINE def decorate_class(self, klass):NEWLINE for attr in dir(klass):NEWLINE attr_value = getattr(klass, attr)NEWLINE if (attr.startswith(patch.TEST_PREFIX) andNEWLINE hasattr(attr_value, "__call__")):NEWLINE decorator = _patch_dict(self.in_dict, self.values, self.clear)NEWLINE decorated = decorator(attr_value)NEWLINE setattr(klass, attr, decorated)NEWLINE return klassNEWLINENEWLINENEWLINE def __enter__(self):NEWLINE """Patch the dict."""NEWLINE self._patch_dict()NEWLINENEWLINENEWLINE def _patch_dict(self):NEWLINE values = self.valuesNEWLINE in_dict = self.in_dictNEWLINE clear = self.clearNEWLINENEWLINE try:NEWLINE original = in_dict.copy()NEWLINE except AttributeError:NEWLINE # dict like object with no copy methodNEWLINE # must support iteration over keysNEWLINE original = {}NEWLINE for key in in_dict:NEWLINE original[key] = in_dict[key]NEWLINE self._original = originalNEWLINENEWLINE if clear:NEWLINE _clear_dict(in_dict)NEWLINENEWLINE try:NEWLINE in_dict.update(values)NEWLINE except AttributeError:NEWLINE # dict like object with no update methodNEWLINE for key in values:NEWLINE in_dict[key] = values[key]NEWLINENEWLINENEWLINE def _unpatch_dict(self):NEWLINE in_dict = self.in_dictNEWLINE original = self._originalNEWLINENEWLINE _clear_dict(in_dict)NEWLINENEWLINE try:NEWLINE in_dict.update(original)NEWLINE except AttributeError:NEWLINE for key in original:NEWLINE in_dict[key] = original[key]NEWLINENEWLINENEWLINE def __exit__(self, *args):NEWLINE """Unpatch the dict."""NEWLINE self._unpatch_dict()NEWLINE return 
FalseNEWLINENEWLINE start = __enter__NEWLINE stop = __exit__NEWLINENEWLINENEWLINEdef _clear_dict(in_dict):NEWLINE try:NEWLINE in_dict.clear()NEWLINE except AttributeError:NEWLINE keys = list(in_dict)NEWLINE for key in keys:NEWLINE del in_dict[key]NEWLINENEWLINENEWLINEdef _patch_stopall():NEWLINE """Stop all active patches. LIFO to unroll nested patches."""NEWLINE for patch in reversed(_patch._active_patches):NEWLINE patch.stop()NEWLINENEWLINENEWLINEpatch.object = _patch_objectNEWLINEpatch.dict = _patch_dictNEWLINEpatch.multiple = _patch_multipleNEWLINEpatch.stopall = _patch_stopallNEWLINEpatch.TEST_PREFIX = 'test'NEWLINENEWLINEmagic_methods = (NEWLINE "lt le gt ge eq ne "NEWLINE "getitem setitem delitem "NEWLINE "len contains iter "NEWLINE "hash str sizeof "NEWLINE "enter exit "NEWLINE "divmod neg pos abs invert "NEWLINE "complex int float index "NEWLINE "trunc floor ceil "NEWLINE "bool next "NEWLINE)NEWLINENEWLINEnumerics = (NEWLINE "add sub mul div floordiv mod lshift rshift and xor or pow truediv"NEWLINE)NEWLINEinplace = ' '.join('i%s' % n for n in numerics.split())NEWLINEright = ' '.join('r%s' % n for n in numerics.split())NEWLINENEWLINE# not including __prepare__, __instancecheck__, __subclasscheck__NEWLINE# (as they are metaclass methods)NEWLINE# __del__ is not supported at all as it causes problems if it existsNEWLINENEWLINE_non_defaults = set('__%s__' % method for method in [NEWLINE 'get', 'set', 'delete', 'reversed', 'missing', 'reduce', 'reduce_ex',NEWLINE 'getinitargs', 'getnewargs', 'getstate', 'setstate', 'getformat',NEWLINE 'setformat', 'repr', 'dir', 'subclasses', 'format',NEWLINE])NEWLINENEWLINENEWLINEdef _get_method(name, func):NEWLINE "Turns a callable object (like a mock) into a real function"NEWLINE def method(self, *args, **kw):NEWLINE return func(self, *args, **kw)NEWLINE method.__name__ = nameNEWLINE return methodNEWLINENEWLINENEWLINE_magics = set(NEWLINE '__%s__' % method for method inNEWLINE ' '.join([magic_methods, numerics, inplace, 
right]).split()NEWLINE)NEWLINENEWLINE_all_magics = _magics | _non_defaultsNEWLINENEWLINE_unsupported_magics = set([NEWLINE '__getattr__', '__setattr__',NEWLINE '__init__', '__new__', '__prepare__'NEWLINE '__instancecheck__', '__subclasscheck__',NEWLINE '__del__'NEWLINE])NEWLINENEWLINE_calculate_return_value = {NEWLINE '__hash__': lambda self: object.__hash__(self),NEWLINE '__str__': lambda self: object.__str__(self),NEWLINE '__sizeof__': lambda self: object.__sizeof__(self),NEWLINE}NEWLINENEWLINE_return_values = {NEWLINE '__lt__': NotImplemented,NEWLINE '__gt__': NotImplemented,NEWLINE '__le__': NotImplemented,NEWLINE '__ge__': NotImplemented,NEWLINE '__int__': 1,NEWLINE '__contains__': False,NEWLINE '__len__': 0,NEWLINE '__exit__': False,NEWLINE '__complex__': 1j,NEWLINE '__float__': 1.0,NEWLINE '__bool__': True,NEWLINE '__index__': 1,NEWLINE}NEWLINENEWLINENEWLINEdef _get_eq(self):NEWLINE def __eq__(other):NEWLINE ret_val = self.__eq__._mock_return_valueNEWLINE if ret_val is not DEFAULT:NEWLINE return ret_valNEWLINE return self is otherNEWLINE return __eq__NEWLINENEWLINEdef _get_ne(self):NEWLINE def __ne__(other):NEWLINE if self.__ne__._mock_return_value is not DEFAULT:NEWLINE return DEFAULTNEWLINE return self is not otherNEWLINE return __ne__NEWLINENEWLINEdef _get_iter(self):NEWLINE def __iter__():NEWLINE ret_val = self.__iter__._mock_return_valueNEWLINE if ret_val is DEFAULT:NEWLINE return iter([])NEWLINE # if ret_val was already an iterator, then calling iter on it shouldNEWLINE # return the iterator unchangedNEWLINE return iter(ret_val)NEWLINE return __iter__NEWLINENEWLINE_side_effect_methods = {NEWLINE '__eq__': _get_eq,NEWLINE '__ne__': _get_ne,NEWLINE '__iter__': _get_iter,NEWLINE}NEWLINENEWLINENEWLINENEWLINEdef _set_return_value(mock, method, name):NEWLINE fixed = _return_values.get(name, DEFAULT)NEWLINE if fixed is not DEFAULT:NEWLINE method.return_value = fixedNEWLINE returnNEWLINENEWLINE return_calulator = _calculate_return_value.get(name)NEWLINE if 
return_calulator is not None:NEWLINE try:NEWLINE return_value = return_calulator(mock)NEWLINE except AttributeError:NEWLINE # XXXX why do we return AttributeError here?NEWLINE # set it as a side_effect instead?NEWLINE return_value = AttributeError(name)NEWLINE method.return_value = return_valueNEWLINE returnNEWLINENEWLINE side_effector = _side_effect_methods.get(name)NEWLINE if side_effector is not None:NEWLINE method.side_effect = side_effector(mock)NEWLINENEWLINENEWLINENEWLINEclass MagicMixin(object):NEWLINE def __init__(self, *args, **kw):NEWLINE _safe_super(MagicMixin, self).__init__(*args, **kw)NEWLINE self._mock_set_magics()NEWLINENEWLINENEWLINE def _mock_set_magics(self):NEWLINE these_magics = _magicsNEWLINENEWLINE if self._mock_methods is not None:NEWLINE these_magics = _magics.intersection(self._mock_methods)NEWLINENEWLINE remove_magics = set()NEWLINE remove_magics = _magics - these_magicsNEWLINENEWLINE for entry in remove_magics:NEWLINE if entry in type(self).__dict__:NEWLINE # remove unneeded magic methodsNEWLINE delattr(self, entry)NEWLINENEWLINE # don't overwrite existing attributes if called a second timeNEWLINE these_magics = these_magics - set(type(self).__dict__)NEWLINENEWLINE _type = type(self)NEWLINE for entry in these_magics:NEWLINE setattr(_type, entry, MagicProxy(entry, self))NEWLINENEWLINENEWLINENEWLINEclass NonCallableMagicMock(MagicMixin, NonCallableMock):NEWLINE """A version of `MagicMock` that isn't callable."""NEWLINE def mock_add_spec(self, spec, spec_set=False):NEWLINE """Add a spec to a mock. `spec` can either be an object or aNEWLINE list of strings. 
Only attributes on the `spec` can be fetched asNEWLINE attributes from the mock.NEWLINENEWLINE If `spec_set` is True then only attributes on the spec can be set."""NEWLINE self._mock_add_spec(spec, spec_set)NEWLINE self._mock_set_magics()NEWLINENEWLINENEWLINENEWLINEclass MagicMock(MagicMixin, Mock):NEWLINE """NEWLINE MagicMock is a subclass of Mock with default implementationsNEWLINE of most of the magic methods. You can use MagicMock without having toNEWLINE configure the magic methods yourself.NEWLINENEWLINE If you use the `spec` or `spec_set` arguments then *only* magicNEWLINE methods that exist in the spec will be created.NEWLINENEWLINE Attributes and the return value of a `MagicMock` will also be `MagicMocks`.NEWLINE """NEWLINE def mock_add_spec(self, spec, spec_set=False):NEWLINE """Add a spec to a mock. `spec` can either be an object or aNEWLINE list of strings. Only attributes on the `spec` can be fetched asNEWLINE attributes from the mock.NEWLINENEWLINE If `spec_set` is True then only attributes on the spec can be set."""NEWLINE self._mock_add_spec(spec, spec_set)NEWLINE self._mock_set_magics()NEWLINENEWLINENEWLINENEWLINEclass MagicProxy(object):NEWLINE def __init__(self, name, parent):NEWLINE self.name = nameNEWLINE self.parent = parentNEWLINENEWLINE def __call__(self, *args, **kwargs):NEWLINE m = self.create_mock()NEWLINE return m(*args, **kwargs)NEWLINENEWLINE def create_mock(self):NEWLINE entry = self.nameNEWLINE parent = self.parentNEWLINE m = parent._get_child_mock(name=entry, _new_name=entry,NEWLINE _new_parent=parent)NEWLINE setattr(parent, entry, m)NEWLINE _set_return_value(parent, m, entry)NEWLINE return mNEWLINENEWLINE def __get__(self, obj, _type=None):NEWLINE return self.create_mock()NEWLINENEWLINENEWLINENEWLINEclass _ANY(object):NEWLINE "A helper object that compares equal to everything."NEWLINENEWLINE def __eq__(self, other):NEWLINE return TrueNEWLINENEWLINE def __ne__(self, other):NEWLINE return FalseNEWLINENEWLINE def 
__repr__(self):NEWLINE return '<ANY>'NEWLINENEWLINEANY = _ANY()NEWLINENEWLINENEWLINENEWLINEdef _format_call_signature(name, args, kwargs):NEWLINE message = '%s(%%s)' % nameNEWLINE formatted_args = ''NEWLINE args_string = ', '.join([repr(arg) for arg in args])NEWLINE kwargs_string = ', '.join([NEWLINE '%s=%r' % (key, value) for key, value in sorted(kwargs.items())NEWLINE ])NEWLINE if args_string:NEWLINE formatted_args = args_stringNEWLINE if kwargs_string:NEWLINE if formatted_args:NEWLINE formatted_args += ', 'NEWLINE formatted_args += kwargs_stringNEWLINENEWLINE return message % formatted_argsNEWLINENEWLINENEWLINENEWLINEclass _Call(tuple):NEWLINE """NEWLINE A tuple for holding the results of a call to a mock, either in the formNEWLINE `(args, kwargs)` or `(name, args, kwargs)`.NEWLINENEWLINE If args or kwargs are empty then a call tuple will compare equal toNEWLINE a tuple without those values. This makes comparisons less verbose::NEWLINENEWLINE _Call(('name', (), {})) == ('name',)NEWLINE _Call(('name', (1,), {})) == ('name', (1,))NEWLINE _Call(((), {'a': 'b'})) == ({'a': 'b'},)NEWLINENEWLINE The `_Call` object provides a useful shortcut for comparing with call::NEWLINENEWLINE _Call(((1, 2), {'a': 3})) == call(1, 2, a=3)NEWLINE _Call(('foo', (1, 2), {'a': 3})) == call.foo(1, 2, a=3)NEWLINENEWLINE If the _Call has no name then it will match any name.NEWLINE """NEWLINE def __new__(cls, value=(), name=None, parent=None, two=False,NEWLINE from_kall=True):NEWLINE name = ''NEWLINE args = ()NEWLINE kwargs = {}NEWLINE _len = len(value)NEWLINE if _len == 3:NEWLINE name, args, kwargs = valueNEWLINE elif _len == 2:NEWLINE first, second = valueNEWLINE if isinstance(first, str):NEWLINE name = firstNEWLINE if isinstance(second, tuple):NEWLINE args = secondNEWLINE else:NEWLINE kwargs = secondNEWLINE else:NEWLINE args, kwargs = first, secondNEWLINE elif _len == 1:NEWLINE value, = valueNEWLINE if isinstance(value, str):NEWLINE name = valueNEWLINE elif isinstance(value, 
tuple):NEWLINE args = valueNEWLINE else:NEWLINE kwargs = valueNEWLINENEWLINE if two:NEWLINE return tuple.__new__(cls, (args, kwargs))NEWLINENEWLINE return tuple.__new__(cls, (name, args, kwargs))NEWLINENEWLINENEWLINE def __init__(self, value=(), name=None, parent=None, two=False,NEWLINE from_kall=True):NEWLINE self.name = nameNEWLINE self.parent = parentNEWLINE self.from_kall = from_kallNEWLINENEWLINENEWLINE def __eq__(self, other):NEWLINE if other is ANY:NEWLINE return TrueNEWLINE try:NEWLINE len_other = len(other)NEWLINE except TypeError:NEWLINE return FalseNEWLINENEWLINE self_name = ''NEWLINE if len(self) == 2:NEWLINE self_args, self_kwargs = selfNEWLINE else:NEWLINE self_name, self_args, self_kwargs = selfNEWLINENEWLINE other_name = ''NEWLINE if len_other == 0:NEWLINE other_args, other_kwargs = (), {}NEWLINE elif len_other == 3:NEWLINE other_name, other_args, other_kwargs = otherNEWLINE elif len_other == 1:NEWLINE value, = otherNEWLINE if isinstance(value, tuple):NEWLINE other_args = valueNEWLINE other_kwargs = {}NEWLINE elif isinstance(value, str):NEWLINE other_name = valueNEWLINE other_args, other_kwargs = (), {}NEWLINE else:NEWLINE other_args = ()NEWLINE other_kwargs = valueNEWLINE else:NEWLINE # len 2NEWLINE # could be (name, args) or (name, kwargs) or (args, kwargs)NEWLINE first, second = otherNEWLINE if isinstance(first, str):NEWLINE other_name = firstNEWLINE if isinstance(second, tuple):NEWLINE other_args, other_kwargs = second, {}NEWLINE else:NEWLINE other_args, other_kwargs = (), secondNEWLINE else:NEWLINE other_args, other_kwargs = first, secondNEWLINENEWLINE if self_name and other_name != self_name:NEWLINE return FalseNEWLINENEWLINE # this order is important for ANY to work!NEWLINE return (other_args, other_kwargs) == (self_args, self_kwargs)NEWLINENEWLINENEWLINE def __ne__(self, other):NEWLINE return not self.__eq__(other)NEWLINENEWLINENEWLINE def __call__(self, *args, **kwargs):NEWLINE if self.name is None:NEWLINE return _Call(('', args, kwargs), 
name='()')NEWLINENEWLINE name = self.name + '()'NEWLINE return _Call((self.name, args, kwargs), name=name, parent=self)NEWLINENEWLINENEWLINE def __getattr__(self, attr):NEWLINE if self.name is None:NEWLINE return _Call(name=attr, from_kall=False)NEWLINE name = '%s.%s' % (self.name, attr)NEWLINE return _Call(name=name, parent=self, from_kall=False)NEWLINENEWLINENEWLINE def __repr__(self):NEWLINE if not self.from_kall:NEWLINE name = self.name or 'call'NEWLINE if name.startswith('()'):NEWLINE name = 'call%s' % nameNEWLINE return nameNEWLINENEWLINE if len(self) == 2:NEWLINE name = 'call'NEWLINE args, kwargs = selfNEWLINE else:NEWLINE name, args, kwargs = selfNEWLINE if not name:NEWLINE name = 'call'NEWLINE elif not name.startswith('()'):NEWLINE name = 'call.%s' % nameNEWLINE else:NEWLINE name = 'call%s' % nameNEWLINE return _format_call_signature(name, args, kwargs)NEWLINENEWLINENEWLINE def call_list(self):NEWLINE """For a call object that represents multiple calls, `call_list`NEWLINE returns a list of all the intermediate calls as well as theNEWLINE final call."""NEWLINE vals = []NEWLINE thing = selfNEWLINE while thing is not None:NEWLINE if thing.from_kall:NEWLINE vals.append(thing)NEWLINE thing = thing.parentNEWLINE return _CallList(reversed(vals))NEWLINENEWLINENEWLINEcall = _Call(from_kall=False)NEWLINENEWLINENEWLINENEWLINEdef create_autospec(spec, spec_set=False, instance=False, _parent=None,NEWLINE _name=None, **kwargs):NEWLINE """Create a mock object using another object as a spec. 
Attributes on theNEWLINE mock will use the corresponding attribute on the `spec` object as theirNEWLINE spec.NEWLINENEWLINE Functions or methods being mocked will have their arguments checkedNEWLINE to check that they are called with the correct signature.NEWLINENEWLINE If `spec_set` is True then attempting to set attributes that don't existNEWLINE on the spec object will raise an `AttributeError`.NEWLINENEWLINE If a class is used as a spec then the return value of the mock (theNEWLINE instance of the class) will have the same spec. You can use a class as theNEWLINE spec for an instance object by passing `instance=True`. The returned mockNEWLINE will only be callable if instances of the mock are callable.NEWLINENEWLINE `create_autospec` also takes arbitrary keyword arguments that are passed toNEWLINE the constructor of the created mock."""NEWLINE if _is_list(spec):NEWLINE # can't pass a list instance to the mock constructor as it will beNEWLINE # interpreted as a list of stringsNEWLINE spec = type(spec)NEWLINENEWLINE is_type = isinstance(spec, type)NEWLINENEWLINE _kwargs = {'spec': spec}NEWLINE if spec_set:NEWLINE _kwargs = {'spec_set': spec}NEWLINE elif spec is None:NEWLINE # None we mock with a normal mock without a specNEWLINE _kwargs = {}NEWLINE if _kwargs and instance:NEWLINE _kwargs['_spec_as_instance'] = TrueNEWLINENEWLINE _kwargs.update(kwargs)NEWLINENEWLINE Klass = MagicMockNEWLINE if type(spec) in DescriptorTypes:NEWLINE # descriptors don't have a specNEWLINE # because we don't know what type they returnNEWLINE _kwargs = {}NEWLINE elif not _callable(spec):NEWLINE Klass = NonCallableMagicMockNEWLINE elif is_type and instance and not _instance_callable(spec):NEWLINE Klass = NonCallableMagicMockNEWLINENEWLINE _name = _kwargs.pop('name', _name)NEWLINENEWLINE _new_name = _nameNEWLINE if _parent is None:NEWLINE # for a top level object no _new_name should be setNEWLINE _new_name = ''NEWLINENEWLINE mock = Klass(parent=_parent, _new_parent=_parent, 
_new_name=_new_name,NEWLINE name=_name, **_kwargs)NEWLINENEWLINE if isinstance(spec, FunctionTypes):NEWLINE # should only happen at the top level because we don'tNEWLINE # recurse for functionsNEWLINE mock = _set_signature(mock, spec)NEWLINE else:NEWLINE _check_signature(spec, mock, is_type, instance)NEWLINENEWLINE if _parent is not None and not instance:NEWLINE _parent._mock_children[_name] = mockNEWLINENEWLINE if is_type and not instance and 'return_value' not in kwargs:NEWLINE mock.return_value = create_autospec(spec, spec_set, instance=True,NEWLINE _name='()', _parent=mock)NEWLINENEWLINE for entry in dir(spec):NEWLINE if _is_magic(entry):NEWLINE # MagicMock already does the useful magic methods for usNEWLINE continueNEWLINENEWLINE # XXXX do we need a better way of getting attributes withoutNEWLINE # triggering code execution (?) Probably not - we need the actualNEWLINE # object to mock it so we would rather trigger a property than mockNEWLINE # the property descriptor. Likewise we want to mock out dynamicallyNEWLINE # provided attributes.NEWLINE # XXXX what about attributes that raise exceptions other thanNEWLINE # AttributeError on being fetched?NEWLINE # we could be resilient against it, or catch and propagate theNEWLINE # exception when the attribute is fetched from the mockNEWLINE try:NEWLINE original = getattr(spec, entry)NEWLINE except AttributeError:NEWLINE continueNEWLINENEWLINE kwargs = {'spec': original}NEWLINE if spec_set:NEWLINE kwargs = {'spec_set': original}NEWLINENEWLINE if not isinstance(original, FunctionTypes):NEWLINE new = _SpecState(original, spec_set, mock, entry, instance)NEWLINE mock._mock_children[entry] = newNEWLINE else:NEWLINE parent = mockNEWLINE if isinstance(spec, FunctionTypes):NEWLINE parent = mock.mockNEWLINENEWLINE skipfirst = _must_skip(spec, entry, is_type)NEWLINE kwargs['_eat_self'] = skipfirstNEWLINE new = MagicMock(parent=parent, name=entry, _new_name=entry,NEWLINE _new_parent=parent,NEWLINE **kwargs)NEWLINE 
mock._mock_children[entry] = newNEWLINE _check_signature(original, new, skipfirst=skipfirst)NEWLINENEWLINE # so functions created with _set_signature become instance attributes,NEWLINE # *plus* their underlying mock exists in _mock_children of the parentNEWLINE # mock. Adding to _mock_children may be unnecessary where we are alsoNEWLINE # setting as an instance attribute?NEWLINE if isinstance(new, FunctionTypes):NEWLINE setattr(mock, entry, new)NEWLINENEWLINE return mockNEWLINENEWLINENEWLINEdef _must_skip(spec, entry, is_type):NEWLINE """NEWLINE Return whether we should skip the first argument on spec's `entry`NEWLINE attribute.NEWLINE """NEWLINE if not isinstance(spec, type):NEWLINE if entry in getattr(spec, '__dict__', {}):NEWLINE # instance attribute - shouldn't skipNEWLINE return FalseNEWLINE spec = spec.__class__NEWLINENEWLINE for klass in spec.__mro__:NEWLINE result = klass.__dict__.get(entry, DEFAULT)NEWLINE if result is DEFAULT:NEWLINE continueNEWLINE if isinstance(result, (staticmethod, classmethod)):NEWLINE return FalseNEWLINE elif isinstance(getattr(result, '__get__', None), MethodWrapperTypes):NEWLINE # Normal method => skip if looked up on typeNEWLINE # (if looked up on instance, self is already skipped)NEWLINE return is_typeNEWLINE else:NEWLINE return FalseNEWLINENEWLINE # shouldn't get here unless function is a dynamically provided attributeNEWLINE # XXXX untested behaviourNEWLINE return is_typeNEWLINENEWLINENEWLINEdef _get_class(obj):NEWLINE try:NEWLINE return obj.__class__NEWLINE except AttributeError:NEWLINE # it is possible for objects to have no __class__NEWLINE return type(obj)NEWLINENEWLINENEWLINEclass _SpecState(object):NEWLINENEWLINE def __init__(self, spec, spec_set=False, parent=None,NEWLINE name=None, ids=None, instance=False):NEWLINE self.spec = specNEWLINE self.ids = idsNEWLINE self.spec_set = spec_setNEWLINE self.parent = parentNEWLINE self.instance = instanceNEWLINE self.name = nameNEWLINENEWLINENEWLINEFunctionTypes = (NEWLINE # 
python functionNEWLINE type(create_autospec),NEWLINE # instance methodNEWLINE type(ANY.__eq__),NEWLINE)NEWLINENEWLINEMethodWrapperTypes = (NEWLINE type(ANY.__eq__.__get__),NEWLINE)NEWLINENEWLINENEWLINEfile_spec = NoneNEWLINENEWLINEdef _iterate_read_data(read_data):NEWLINE # Helper for mock_open:NEWLINE # Retrieve lines from read_data via a generator so that separate calls toNEWLINE # readline, read, and readlines are properly interleavedNEWLINE data_as_list = ['{}\n'.format(l) for l in read_data.split('\n')]NEWLINENEWLINE if data_as_list[-1] == '\n':NEWLINE # If the last line ended in a newline, the list comprehension will have anNEWLINE # extra entry that's just a newline. Remove this.NEWLINE data_as_list = data_as_list[:-1]NEWLINE else:NEWLINE # If there wasn't an extra newline by itself, then the file beingNEWLINE # emulated doesn't have a newline to end the last line remove theNEWLINE # newline that our naive format() addedNEWLINE data_as_list[-1] = data_as_list[-1][:-1]NEWLINENEWLINE for line in data_as_list:NEWLINE yield lineNEWLINENEWLINEdef mock_open(mock=None, read_data=''):NEWLINE """NEWLINE A helper function to create a mock to replace the use of `open`. It worksNEWLINE for `open` called directly or used as a context manager.NEWLINENEWLINE The `mock` argument is the mock object to configure. If `None` (theNEWLINE default) then a `MagicMock` will be created for you, with the API limitedNEWLINE to methods or attributes available on standard file handles.NEWLINENEWLINE `read_data` is a string for the `read` methoddline`, and `readlines` of theNEWLINE file handle to return. 
This is an empty string by default.NEWLINE """NEWLINE def _readlines_side_effect(*args, **kwargs):NEWLINE if handle.readlines.return_value is not None:NEWLINE return handle.readlines.return_valueNEWLINE return list(_data)NEWLINENEWLINE def _read_side_effect(*args, **kwargs):NEWLINE if handle.read.return_value is not None:NEWLINE return handle.read.return_valueNEWLINE return ''.join(_data)NEWLINENEWLINE def _readline_side_effect():NEWLINE if handle.readline.return_value is not None:NEWLINE while True:NEWLINE yield handle.readline.return_valueNEWLINE for line in _data:NEWLINE yield lineNEWLINENEWLINENEWLINE global file_specNEWLINE if file_spec is None:NEWLINE import _ioNEWLINE file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))NEWLINENEWLINE if mock is None:NEWLINE mock = MagicMock(name='open', spec=open)NEWLINENEWLINE handle = MagicMock(spec=file_spec)NEWLINE handle.__enter__.return_value = handleNEWLINENEWLINE _data = _iterate_read_data(read_data)NEWLINENEWLINE handle.write.return_value = NoneNEWLINE handle.read.return_value = NoneNEWLINE handle.readline.return_value = NoneNEWLINE handle.readlines.return_value = NoneNEWLINENEWLINE handle.read.side_effect = _read_side_effectNEWLINE handle.readline.side_effect = _readline_side_effect()NEWLINE handle.readlines.side_effect = _readlines_side_effectNEWLINENEWLINE mock.return_value = handleNEWLINE return mockNEWLINENEWLINENEWLINEclass PropertyMock(Mock):NEWLINE """NEWLINE A mock intended to be used as a property, or other descriptor, on a class.NEWLINE `PropertyMock` provides `__get__` and `__set__` methods so you can specifyNEWLINE a return value when it is fetched.NEWLINENEWLINE Fetching a `PropertyMock` instance from an object calls the mock, withNEWLINE no args. 
Setting it calls the mock with the value being set.NEWLINE """NEWLINE def _get_child_mock(self, **kwargs):NEWLINE return MagicMock(**kwargs)NEWLINENEWLINE def __get__(self, obj, obj_type):NEWLINE return self()NEWLINE def __set__(self, obj, val):NEWLINE self(val)NEWLINE
#!/usr/bin/pythonNEWLINE# -*- coding: utf-8 -*-NEWLINE#NEWLINE# Copyright: (c) 2018, F5 Networks Inc.NEWLINE# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)NEWLINENEWLINEfrom __future__ import absolute_import, division, print_functionNEWLINE__metaclass__ = typeNEWLINENEWLINEDOCUMENTATION = r'''NEWLINE---NEWLINEmodule: bigip_gtm_globalNEWLINEshort_description: Manages global GTM settingsNEWLINEdescription:NEWLINE - Manages global BIG-IP GTM (now BIG-IP DNS) settings. These settings include general, load balancing, and metricsNEWLINE related settings.NEWLINEversion_added: "1.0.0"NEWLINEoptions:NEWLINE synchronization:NEWLINE description:NEWLINE - Specifies whether this system is a member of a synchronization group.NEWLINE - When you enable synchronization, the system periodically queries other systems inNEWLINE the synchronization group to obtain and distribute configuration and metrics collectionNEWLINE updates.NEWLINE - The synchronization group may contain systems configured as Global Traffic Manager (DNS) andNEWLINE Link Controller systems.NEWLINE type: boolNEWLINE synchronization_group_name:NEWLINE description:NEWLINE - Specifies the name of the synchronization group to which the system belongs.NEWLINE type: strNEWLINE synchronize_zone_files:NEWLINE description:NEWLINE - Specifies the system synchronizes Domain Name System (DNS) zone files among theNEWLINE synchronization group members.NEWLINE type: boolNEWLINEextends_documentation_fragment: f5networks.f5_modules.f5NEWLINEauthor:NEWLINE - Tim Rupp (@caphrim007)NEWLINE - Wojciech Wypior (@wojtek0806)NEWLINE'''NEWLINENEWLINEEXAMPLES = r'''NEWLINE- name: Configure synchronization settingsNEWLINE bigip_gtm_global:NEWLINE synchronization: yesNEWLINE synchronization_group_name: my-groupNEWLINE synchronize_zone_files: yesNEWLINE state: presentNEWLINE provider:NEWLINE user: adminNEWLINE password: secretNEWLINE server: lb.mydomain.comNEWLINE delegate_to: 
localhostNEWLINE'''NEWLINENEWLINERETURN = r'''NEWLINEsynchronization:NEWLINE description: The synchronization setting on the system.NEWLINE returned: changedNEWLINE type: boolNEWLINE sample: trueNEWLINEsynchronization_group_name:NEWLINE description: The synchronization group name.NEWLINE returned: changedNEWLINE type: strNEWLINE sample: my-groupNEWLINEsynchronize_zone_files:NEWLINE description: Whether or not the system will synchronize zone files.NEWLINE returned: changedNEWLINE type: strNEWLINE sample: my-groupNEWLINE'''NEWLINEfrom datetime import datetimeNEWLINEfrom ansible.module_utils.basic import AnsibleModuleNEWLINENEWLINEfrom ..module_utils.bigip import F5RestClientNEWLINEfrom ..module_utils.common import (NEWLINE F5ModuleError, AnsibleF5Parameters, f5_argument_specNEWLINE)NEWLINEfrom ..module_utils.icontrol import (NEWLINE module_provisioned, tmos_versionNEWLINE)NEWLINEfrom ..module_utils.teem import send_teemNEWLINENEWLINENEWLINEclass Parameters(AnsibleF5Parameters):NEWLINE api_map = {NEWLINE 'synchronizationGroupName': 'synchronization_group_name',NEWLINE 'synchronizeZoneFiles': 'synchronize_zone_files',NEWLINE }NEWLINENEWLINE api_attributes = [NEWLINE 'synchronizeZoneFiles',NEWLINE 'synchronizationGroupName',NEWLINE 'synchronization',NEWLINE ]NEWLINENEWLINE returnables = [NEWLINE 'synchronization',NEWLINE 'synchronization_group_name',NEWLINE 'synchronize_zone_files',NEWLINE ]NEWLINENEWLINE updatables = [NEWLINE 'synchronization',NEWLINE 'synchronization_group_name',NEWLINE 'synchronize_zone_files',NEWLINE ]NEWLINENEWLINENEWLINEclass ApiParameters(Parameters):NEWLINE @propertyNEWLINE def synchronization(self):NEWLINE if self._values['synchronization'] is None:NEWLINE return NoneNEWLINE elif self._values['synchronization'] == 'no':NEWLINE return FalseNEWLINE else:NEWLINE return TrueNEWLINENEWLINE @propertyNEWLINE def synchronize_zone_files(self):NEWLINE if self._values['synchronize_zone_files'] is None:NEWLINE return NoneNEWLINE elif 
self._values['synchronize_zone_files'] == 'no':NEWLINE return FalseNEWLINE else:NEWLINE return TrueNEWLINENEWLINE @propertyNEWLINE def synchronization_group_name(self):NEWLINE if self._values['synchronization_group_name'] is None:NEWLINE return NoneNEWLINE return str(self._values['synchronization_group_name'])NEWLINENEWLINENEWLINEclass ModuleParameters(Parameters):NEWLINE passNEWLINENEWLINENEWLINEclass Changes(Parameters):NEWLINE def to_return(self):NEWLINE result = {}NEWLINE try:NEWLINE for returnable in self.returnables:NEWLINE result[returnable] = getattr(self, returnable)NEWLINE result = self._filter_params(result)NEWLINE except Exception:NEWLINE raiseNEWLINE return resultNEWLINENEWLINENEWLINEclass UsableChanges(Changes):NEWLINE @propertyNEWLINE def synchronization(self):NEWLINE if self._values['synchronization'] is None:NEWLINE return NoneNEWLINE elif self._values['synchronization'] is False:NEWLINE return 'no'NEWLINE else:NEWLINE return 'yes'NEWLINENEWLINE @propertyNEWLINE def synchronize_zone_files(self):NEWLINE if self._values['synchronize_zone_files'] is None:NEWLINE return NoneNEWLINE elif self._values['synchronize_zone_files'] is False:NEWLINE return 'no'NEWLINE else:NEWLINE return 'yes'NEWLINENEWLINENEWLINEclass ReportableChanges(Changes):NEWLINE passNEWLINENEWLINENEWLINEclass Difference(object):NEWLINE def __init__(self, want, have=None):NEWLINE self.want = wantNEWLINE self.have = haveNEWLINENEWLINE def compare(self, param):NEWLINE try:NEWLINE result = getattr(self, param)NEWLINE return resultNEWLINE except AttributeError:NEWLINE return self.__default(param)NEWLINENEWLINE def __default(self, param):NEWLINE attr1 = getattr(self.want, param)NEWLINE try:NEWLINE attr2 = getattr(self.have, param)NEWLINE if attr1 != attr2:NEWLINE return attr1NEWLINE except AttributeError:NEWLINE return attr1NEWLINENEWLINE @propertyNEWLINE def synchronization_group_name(self):NEWLINE if self.want.synchronization_group_name is None:NEWLINE return NoneNEWLINE if 
self.want.synchronization_group_name == '' and self.have.synchronization_group_name is None:NEWLINE return NoneNEWLINE if self.want.synchronization_group_name != self.have.synchronization_group_name:NEWLINE return self.want.synchronization_group_nameNEWLINENEWLINENEWLINEclass ModuleManager(object):NEWLINE def __init__(self, *args, **kwargs):NEWLINE self.module = kwargs.get('module', None)NEWLINE self.client = F5RestClient(**self.module.params)NEWLINE self.want = ModuleParameters(params=self.module.params)NEWLINE self.have = ApiParameters()NEWLINE self.changes = UsableChanges()NEWLINENEWLINE def _update_changed_options(self):NEWLINE diff = Difference(self.want, self.have)NEWLINE updatables = Parameters.updatablesNEWLINE changed = dict()NEWLINE for k in updatables:NEWLINE change = diff.compare(k)NEWLINE if change is None:NEWLINE continueNEWLINE else:NEWLINE if isinstance(change, dict):NEWLINE changed.update(change)NEWLINE else:NEWLINE changed[k] = changeNEWLINE if changed:NEWLINE self.changes = UsableChanges(params=changed)NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINE def _announce_deprecations(self, result):NEWLINE warnings = result.pop('__warnings', [])NEWLINE for warning in warnings:NEWLINE self.client.module.deprecate(NEWLINE msg=warning['msg'],NEWLINE version=warning['version']NEWLINE )NEWLINENEWLINE def exec_module(self):NEWLINE start = datetime.now().isoformat()NEWLINE version = tmos_version(self.client)NEWLINE if not module_provisioned(self.client, 'gtm'):NEWLINE raise F5ModuleError(NEWLINE "GTM must be provisioned to use this module."NEWLINE )NEWLINE result = dict()NEWLINENEWLINE changed = self.present()NEWLINENEWLINE reportable = ReportableChanges(params=self.changes.to_return())NEWLINE changes = reportable.to_return()NEWLINE result.update(**changes)NEWLINE result.update(dict(changed=changed))NEWLINE self._announce_deprecations(result)NEWLINE send_teem(start, self.client, self.module, version)NEWLINE return resultNEWLINENEWLINE def 
present(self):NEWLINE return self.update()NEWLINENEWLINE def update(self):NEWLINE self.have = self.read_current_from_device()NEWLINE if not self.should_update():NEWLINE return FalseNEWLINE if self.module.check_mode:NEWLINE return TrueNEWLINE self.update_on_device()NEWLINE return TrueNEWLINENEWLINE def should_update(self):NEWLINE result = self._update_changed_options()NEWLINE if result:NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINE def update_on_device(self):NEWLINE params = self.changes.api_params()NEWLINE uri = "https://{0}:{1}/mgmt/tm/gtm/global-settings/general/".format(NEWLINE self.client.provider['server'],NEWLINE self.client.provider['server_port'],NEWLINE )NEWLINE resp = self.client.api.patch(uri, json=params)NEWLINE try:NEWLINE response = resp.json()NEWLINE except ValueError as ex:NEWLINE raise F5ModuleError(str(ex))NEWLINENEWLINE if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:NEWLINE return TrueNEWLINE raise F5ModuleError(resp.content)NEWLINENEWLINE def read_current_from_device(self):NEWLINE uri = "https://{0}:{1}/mgmt/tm/gtm/global-settings/general/".format(NEWLINE self.client.provider['server'],NEWLINE self.client.provider['server_port'],NEWLINE )NEWLINE resp = self.client.api.get(uri)NEWLINE try:NEWLINE response = resp.json()NEWLINE except ValueError as ex:NEWLINE raise F5ModuleError(str(ex))NEWLINENEWLINE if resp.status in [200, 201] or 'code' in response and response['code'] in [200, 201]:NEWLINE return ApiParameters(params=response)NEWLINE raise F5ModuleError(resp.content)NEWLINENEWLINENEWLINEclass ArgumentSpec(object):NEWLINE def __init__(self):NEWLINE self.supports_check_mode = TrueNEWLINE argument_spec = dict(NEWLINE synchronization=dict(type='bool'),NEWLINE synchronization_group_name=dict(),NEWLINE synchronize_zone_files=dict(type='bool')NEWLINE )NEWLINE self.argument_spec = {}NEWLINE self.argument_spec.update(f5_argument_spec)NEWLINE self.argument_spec.update(argument_spec)NEWLINENEWLINENEWLINEdef 
main():NEWLINE spec = ArgumentSpec()NEWLINENEWLINE module = AnsibleModule(NEWLINE argument_spec=spec.argument_spec,NEWLINE supports_check_mode=spec.supports_check_mode,NEWLINE )NEWLINENEWLINE try:NEWLINE mm = ModuleManager(module=module)NEWLINE results = mm.exec_module()NEWLINE module.exit_json(**results)NEWLINE except F5ModuleError as ex:NEWLINE module.fail_json(msg=str(ex))NEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE main()NEWLINE
# /run.py
import os

from dotenv import load_dotenv, find_dotenv

from src.app import create_app

# Load environment variables from a .env file, if one exists.
load_dotenv(find_dotenv())

env_name = os.getenv('FLASK_ENV')
app = create_app(env_name)

if __name__ == '__main__':
    # PORT arrives from the environment as a string; convert it so the
    # server receives an integer, defaulting to 5000 when unset (the same
    # default Flask would use for a missing port).
    port = int(os.getenv('PORT', '5000'))
    # Read the session secret from the environment instead of hard-coding
    # it; the old literal 'SECRET_KEY' remains the fallback so behavior is
    # unchanged when the variable is not set.
    app.secret_key = os.getenv('SECRET_KEY', 'SECRET_KEY')
    app.debug = True
    # run app
    app.run(host='0.0.0.0', port=port)
import inspect
import json


class JsonSerializer(json.JSONEncoder):
    """JSON encoder for arbitrary objects.

    Resolution order for a value the stock encoder cannot handle:

    1. an object exposing ``to_json()`` is replaced by that call's result;
    2. an object with an instance ``__dict__`` is flattened to a dict of its
       non-dunder, non-callable attributes (with key renames applied);
    3. a datetime-like object (anything with ``strftime``) becomes an ISO-8601
       string with microseconds dropped;
    4. anything else is returned unchanged — required because this method is
       also invoked recursively on already-serializable intermediates.
    """

    # Python attribute names -> wire-format keys.
    _KEYS_TRANSFORMATIONS: dict = {'three_ds': '3ds',
                                   'account_holder_type': 'account-holder-type',
                                   'payment_network': 'payment-network',
                                   'from_': 'from'}

    def default(self, obj):
        if hasattr(obj, 'to_json'):
            return self.default(obj.to_json())
        if hasattr(obj, '__dict__'):
            members = {}
            for name, value in inspect.getmembers(obj):
                if name.startswith('__'):
                    continue
                # Skip anything callable-ish; only plain data attributes
                # belong in the serialized form.
                if any(check(value) for check in (
                        inspect.isabstract,
                        inspect.isbuiltin,
                        inspect.isfunction,
                        inspect.isgenerator,
                        inspect.isgeneratorfunction,
                        inspect.ismethod,
                        inspect.ismethoddescriptor,
                        inspect.isroutine)):
                    continue
                members[name] = value
            return self.default(self.apply_key_transformations(members))
        if hasattr(obj, 'strftime'):
            # Datetime-like: ISO format, truncated to whole seconds.
            return self.default(obj.replace(microsecond=0).isoformat())
        return obj

    def apply_key_transformations(self, props):
        """Rename known attribute keys to their wire-format names, in place."""
        for old_key in self._KEYS_TRANSFORMATIONS:
            if old_key in props:
                props[self._KEYS_TRANSFORMATIONS[old_key]] = props.pop(old_key)

        return props
from flask_wtf import FlaskForm
from wtforms import StringField,PasswordField,SubmitField, BooleanField
# NOTE(review): `Required` is the deprecated alias of `DataRequired` in
# modern WTForms — consider migrating when upgrading the dependency.
from wtforms.validators import Required,Email,EqualTo
from ..models import User
from wtforms import ValidationError


class RegistrationForm(FlaskForm):
    """Sign-up form: email, username, and a confirmed password."""

    email = StringField('Your Email Address',validators=[Required(),Email()])
    username = StringField('Enter your username',validators = [Required()])
    # EqualTo ties this field to password_confirm; mismatches surface the
    # 'Passwords must match' message on this field.
    password = PasswordField('Password',validators = [Required(),
        EqualTo('password_confirm',message = 'Passwords must match')])
    password_confirm = PasswordField('Confirm Passwords',validators = [Required()])
    submit = SubmitField('Sign Up')

    def validate_email(self,data_field):
        """Inline WTForms validator: reject an email already registered."""
        if User.query.filter_by(email =data_field.data).first():
            raise ValidationError('There is an account with that email')

    def validate_username(self,data_field):
        """Inline WTForms validator: reject a username already taken."""
        if User.query.filter_by(username = data_field.data).first():
            raise ValidationError('That username is taken')


class LoginForm(FlaskForm):
    """Sign-in form: email, password, and a remember-me flag."""

    email = StringField('Your Email Address',validators=[Required(),Email()])
    password = PasswordField('Password',validators =[Required()])
    remember = BooleanField('Remember me')
    submit = SubmitField('Sign In')
from pymongo import MongoClient, DESCENDING

import settings
from .functions import filtered

# Fail fast when the database location has not been configured.
if not settings.MONGODB_URL:
    raise RuntimeError('MONGODB_URL is not set')

client = MongoClient(settings.MONGODB_URL, connect=False)


class Modlog:
    """Read access to the moderation-log ``entries`` collection."""

    _instance = None  # cached singleton, see get_instance()

    def __init__(self) -> None:
        self.db = client.get_default_database()
        self.c_entries = self.db['entries']

    def entry(self, entry_id):
        """Return the single entry with the given id (without Mongo ``_id``)."""
        document = self.c_entries.find_one({'id': entry_id}, projection={'_id': 0})
        return filtered(document)

    def entries(self, entry_id, limit=20, filters=None):
        """Return up to ``limit`` entries older than ``entry_id``, newest first.

        When ``entry_id`` matches an existing entry, only entries created
        strictly before it are returned (cursor-style pagination).  Extra
        query conditions may be supplied via ``filters``.
        """
        anchor = self.entry(entry_id)

        if not filters:
            filters = {}

        if anchor:
            # Paginate by creation time relative to the anchor entry.
            filters['created_utc'] = {'$lt': anchor['created_utc']}

        cursor = self.c_entries.find(
            filters,
            limit=limit,
            sort=[('created_utc', DESCENDING)],
            projection={'_id': 0},
        )
        return filtered(cursor)

    @classmethod
    def get_instance(cls):
        """Return the shared instance, creating it on first use."""
        if not cls._instance:
            cls._instance = cls()
        return cls._instance
# -*- coding: utf-8 -*-NEWLINENEWLINE# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:NEWLINE# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-codeNEWLINENEWLINEfrom ccxt.kucoin import kucoinNEWLINEfrom ccxt.base.errors import AuthenticationErrorNEWLINEfrom ccxt.base.errors import PermissionDeniedNEWLINEfrom ccxt.base.errors import AccountSuspendedNEWLINEfrom ccxt.base.errors import ArgumentsRequiredNEWLINEfrom ccxt.base.errors import BadRequestNEWLINEfrom ccxt.base.errors import InsufficientFundsNEWLINEfrom ccxt.base.errors import InvalidOrderNEWLINEfrom ccxt.base.errors import NotSupportedNEWLINEfrom ccxt.base.errors import RateLimitExceededNEWLINEfrom ccxt.base.errors import ExchangeNotAvailableNEWLINEfrom ccxt.base.errors import InvalidNonceNEWLINEfrom ccxt.base.precise import PreciseNEWLINENEWLINENEWLINEclass kucoinfutures(kucoin):NEWLINENEWLINE def describe(self):NEWLINE return self.deep_extend(super(kucoinfutures, self).describe(), {NEWLINE 'id': 'kucoinfutures',NEWLINE 'name': 'Kucoin Futures',NEWLINE 'countries': ['SC'],NEWLINE 'rateLimit': 75,NEWLINE 'version': 'v1',NEWLINE 'certified': False,NEWLINE 'pro': False,NEWLINE 'comment': 'Platform 2.0',NEWLINE 'quoteJsonNumbers': False,NEWLINE 'has': {NEWLINE 'cancelAllOrders': True,NEWLINE 'cancelOrder': True,NEWLINE 'CORS': None,NEWLINE 'createDepositAddress': False,NEWLINE 'createOrder': True,NEWLINE 'fetchAccounts': False,NEWLINE 'fetchBalance': True,NEWLINE 'fetchBorrowRate': False,NEWLINE 'fetchBorrowRates': False,NEWLINE 'fetchBorrowRatesPerSymbol': False,NEWLINE 'fetchClosedOrders': True,NEWLINE 'fetchCurrencies': False,NEWLINE 'fetchDepositAddress': True,NEWLINE 'fetchDeposits': None,NEWLINE 'fetchFundingFee': False,NEWLINE 'fetchFundingHistory': True,NEWLINE 'fetchFundingRate': True,NEWLINE 'fetchFundingRateHistory': False,NEWLINE 'fetchIndexOHLCV': False,NEWLINE 'fetchL3OrderBook': False,NEWLINE 'fetchLedger': False,NEWLINE 'fetchMarkets': 
True,NEWLINE 'fetchMarkOHLCV': False,NEWLINE 'fetchMyTrades': True,NEWLINE 'fetchOHLCV': True,NEWLINE 'fetchOpenOrders': True,NEWLINE 'fetchOrder': True,NEWLINE 'fetchOrderBook': True,NEWLINE 'fetchOrdersByStatus': True,NEWLINE 'fetchPositions': True,NEWLINE 'fetchPremiumIndexOHLCV': False,NEWLINE 'fetchStatus': True,NEWLINE 'fetchTicker': True,NEWLINE 'fetchTickers': False,NEWLINE 'fetchTime': True,NEWLINE 'fetchTrades': True,NEWLINE 'fetchWithdrawals': None,NEWLINE 'loadTimeDifference': True,NEWLINE 'setMarginMode': False,NEWLINE 'transfer': True,NEWLINE 'transferOut': True,NEWLINE 'withdraw': None,NEWLINE },NEWLINE 'urls': {NEWLINE 'logo': 'https://user-images.githubusercontent.com/1294454/147508995-9e35030a-d046-43a1-a006-6fabd981b554.jpg',NEWLINE 'doc': [NEWLINE 'https://docs.kucoin.com/futures',NEWLINE 'https://docs.kucoin.com',NEWLINE ],NEWLINE 'www': 'https://futures.kucoin.com/',NEWLINE 'referral': 'https://futures.kucoin.com/?rcode=E5wkqe',NEWLINE 'api': {NEWLINE 'public': 'https://openapi-v2.kucoin.com',NEWLINE 'private': 'https://openapi-v2.kucoin.com',NEWLINE 'futuresPrivate': 'https://api-futures.kucoin.com',NEWLINE 'futuresPublic': 'https://api-futures.kucoin.com',NEWLINE },NEWLINE 'test': {NEWLINE 'public': 'https://openapi-sandbox.kucoin.com',NEWLINE 'private': 'https://openapi-sandbox.kucoin.com',NEWLINE 'futuresPrivate': 'https://api-sandbox-futures.kucoin.com',NEWLINE 'futuresPublic': 'https://api-sandbox-futures.kucoin.com',NEWLINE },NEWLINE },NEWLINE 'requiredCredentials': {NEWLINE 'apiKey': True,NEWLINE 'secret': True,NEWLINE 'password': True,NEWLINE },NEWLINE 'api': {NEWLINE 'futuresPublic': {NEWLINE 'get': {NEWLINE 'contracts/active': 1,NEWLINE 'contracts/{symbol}': 1,NEWLINE 'ticker': 1,NEWLINE 'level2/snapshot': 1.33,NEWLINE 'level2/depth{limit}': 1,NEWLINE 'level2/message/query': 1,NEWLINE 'level3/message/query': 1, # deprecated,level3/snapshot is suggestedNEWLINE 'level3/snapshot': 1, # v2NEWLINE 'trade/history': 1,NEWLINE 
'interest/query': 1,NEWLINE 'index/query': 1,NEWLINE 'mark-price/{symbol}/current': 1,NEWLINE 'premium/query': 1,NEWLINE 'funding-rate/{symbol}/current': 1,NEWLINE 'timestamp': 1,NEWLINE 'status': 1,NEWLINE 'kline/query': 1,NEWLINE },NEWLINE 'post': {NEWLINE 'bullet-public': 1,NEWLINE },NEWLINE },NEWLINE 'futuresPrivate': {NEWLINE 'get': {NEWLINE 'account-overview': 1.33,NEWLINE 'transaction-history': 4.44,NEWLINE 'deposit-address': 1,NEWLINE 'deposit-list': 1,NEWLINE 'withdrawals/quotas': 1,NEWLINE 'withdrawal-list': 1,NEWLINE 'transfer-list': 1,NEWLINE 'orders': 1.33,NEWLINE 'stopOrders': 1,NEWLINE 'recentDoneOrders': 1,NEWLINE 'orders/{orderId}': 1, # ?clientOid={client-order-id} # get order by orderIdNEWLINE 'orders/byClientOid': 1, # ?clientOid=eresc138b21023a909e5ad59 # get order by clientOidNEWLINE 'fills': 4.44,NEWLINE 'recentFills': 4.44,NEWLINE 'openOrderStatistics': 1,NEWLINE 'position': 1,NEWLINE 'positions': 4.44,NEWLINE 'funding-history': 4.44,NEWLINE },NEWLINE 'post': {NEWLINE 'withdrawals': 1,NEWLINE 'transfer-out': 1, # v2NEWLINE 'orders': 1.33,NEWLINE 'position/margin/auto-deposit-status': 1,NEWLINE 'position/margin/deposit-margin': 1,NEWLINE 'bullet-private': 1,NEWLINE },NEWLINE 'delete': {NEWLINE 'withdrawals/{withdrawalId}': 1,NEWLINE 'cancel/transfer-out': 1,NEWLINE 'orders/{orderId}': 1,NEWLINE 'orders': 4.44,NEWLINE 'stopOrders': 1,NEWLINE },NEWLINE },NEWLINE },NEWLINE 'exceptions': {NEWLINE 'exact': {NEWLINE '400': BadRequest, # Bad Request -- Invalid request formatNEWLINE '401': AuthenticationError, # Unauthorized -- Invalid API KeyNEWLINE '403': NotSupported, # Forbidden -- The request is forbiddenNEWLINE '404': NotSupported, # Not Found -- The specified resource could not be foundNEWLINE '405': NotSupported, # Method Not Allowed -- You tried to access the resource with an invalid method.NEWLINE '415': BadRequest, # Content-Type -- application/jsonNEWLINE '429': RateLimitExceeded, # Too Many Requests -- Access limit breachedNEWLINE '500': 
ExchangeNotAvailable, # Internal Server Error -- We had a problem with our server. Try again later.NEWLINE '503': ExchangeNotAvailable, # Service Unavailable -- We're temporarily offline for maintenance. Please try again later.NEWLINE '100001': InvalidOrder, # {"code":"100001","msg":"Unavailable to enable both \"postOnly\" and \"hidden\""}NEWLINE '100004': BadRequest, # {"code":"100004","msg":"Order is in not cancelable state"}NEWLINE '101030': PermissionDenied, # {"code":"101030","msg":"You haven't yet enabled the margin trading"}NEWLINE '200004': InsufficientFunds,NEWLINE '230003': InsufficientFunds, # {"code":"230003","msg":"Balance insufficient!"}NEWLINE '260100': InsufficientFunds, # {"code":"260100","msg":"account.noBalance"}NEWLINE '300003': InsufficientFunds,NEWLINE '300012': InvalidOrder,NEWLINE '400001': AuthenticationError, # Any of KC-API-KEY, KC-API-SIGN, KC-API-TIMESTAMP, KC-API-PASSPHRASE is missing in your request header.NEWLINE '400002': InvalidNonce, # KC-API-TIMESTAMP Invalid -- Time differs from server time by more than 5 secondsNEWLINE '400003': AuthenticationError, # KC-API-KEY not existsNEWLINE '400004': AuthenticationError, # KC-API-PASSPHRASE errorNEWLINE '400005': AuthenticationError, # Signature error -- Please check your signatureNEWLINE '400006': AuthenticationError, # The IP address is not in the API whitelistNEWLINE '400007': AuthenticationError, # Access Denied -- Your API key does not have sufficient permissions to access the URINEWLINE '404000': NotSupported, # URL Not Found -- The requested resource could not be foundNEWLINE '400100': BadRequest, # Parameter Error -- You tried to access the resource with invalid parametersNEWLINE '411100': AccountSuspended, # User is frozen -- Please contact us via support centerNEWLINE '500000': ExchangeNotAvailable, # Internal Server Error -- We had a problem with our server. 
Try again later.NEWLINE },NEWLINE },NEWLINE 'fees': {NEWLINE 'trading': {NEWLINE 'tierBased': True,NEWLINE 'percentage': True,NEWLINE 'taker': self.parse_number('0.0006'),NEWLINE 'maker': self.parse_number('0.0002'),NEWLINE 'tiers': {NEWLINE 'taker': [NEWLINE [self.parse_number('0'), self.parse_number('0.0006')],NEWLINE [self.parse_number('50'), self.parse_number('0.0006')],NEWLINE [self.parse_number('200'), self.parse_number('0.0006')],NEWLINE [self.parse_number('500'), self.parse_number('0.0005')],NEWLINE [self.parse_number('1000'), self.parse_number('0.0004')],NEWLINE [self.parse_number('2000'), self.parse_number('0.0004')],NEWLINE [self.parse_number('4000'), self.parse_number('0.00038')],NEWLINE [self.parse_number('8000'), self.parse_number('0.00035')],NEWLINE [self.parse_number('15000'), self.parse_number('0.00032')],NEWLINE [self.parse_number('25000'), self.parse_number('0.0003')],NEWLINE [self.parse_number('40000'), self.parse_number('0.0003')],NEWLINE [self.parse_number('60000'), self.parse_number('0.0003')],NEWLINE [self.parse_number('80000'), self.parse_number('0.0003')],NEWLINE ],NEWLINE 'maker': [NEWLINE [self.parse_number('0'), self.parse_number('0.02')],NEWLINE [self.parse_number('50'), self.parse_number('0.015')],NEWLINE [self.parse_number('200'), self.parse_number('0.01')],NEWLINE [self.parse_number('500'), self.parse_number('0.01')],NEWLINE [self.parse_number('1000'), self.parse_number('0.01')],NEWLINE [self.parse_number('2000'), self.parse_number('0')],NEWLINE [self.parse_number('4000'), self.parse_number('0')],NEWLINE [self.parse_number('8000'), self.parse_number('0')],NEWLINE [self.parse_number('15000'), self.parse_number('-0.003')],NEWLINE [self.parse_number('25000'), self.parse_number('-0.006')],NEWLINE [self.parse_number('40000'), self.parse_number('-0.009')],NEWLINE [self.parse_number('60000'), self.parse_number('-0.012')],NEWLINE [self.parse_number('80000'), self.parse_number('-0.015')],NEWLINE ],NEWLINE },NEWLINE },NEWLINE 'funding': 
{NEWLINE 'tierBased': False,NEWLINE 'percentage': False,NEWLINE 'withdraw': {},NEWLINE 'deposit': {},NEWLINE },NEWLINE },NEWLINE 'commonCurrencies': {NEWLINE 'HOT': 'HOTNOW',NEWLINE 'EDGE': 'DADI', # https://github.com/ccxt/ccxt/issues/5756NEWLINE 'WAX': 'WAXP',NEWLINE 'TRY': 'Trias',NEWLINE 'VAI': 'VAIOT',NEWLINE 'XBT': 'BTC',NEWLINE },NEWLINE 'timeframes': {NEWLINE '1m': 1,NEWLINE '3m': None,NEWLINE '5m': 5,NEWLINE '15m': 15,NEWLINE '30m': 30,NEWLINE '1h': 60,NEWLINE '2h': 120,NEWLINE '4h': 240,NEWLINE '6h': None,NEWLINE '8h': 480,NEWLINE '12h': 720,NEWLINE '1d': 1440,NEWLINE '1w': 10080,NEWLINE },NEWLINE 'options': {NEWLINE 'version': 'v1',NEWLINE 'symbolSeparator': '-',NEWLINE 'defaultType': 'swap',NEWLINE 'marginTypes': {},NEWLINE # endpoint versionsNEWLINE 'versions': {NEWLINE 'futuresPrivate': {NEWLINE 'POST': {NEWLINE 'transfer-out': 'v2',NEWLINE },NEWLINE },NEWLINE 'futuresPublic': {NEWLINE 'GET': {NEWLINE 'level3/snapshot': 'v2',NEWLINE },NEWLINE },NEWLINE },NEWLINE 'networks': {NEWLINE 'OMNI': 'omni',NEWLINE 'ERC20': 'eth',NEWLINE 'TRC20': 'trx',NEWLINE },NEWLINE },NEWLINE })NEWLINENEWLINE def fetch_accounts(self, params={}):NEWLINE raise BadRequest(self.id + ' has no method fetchAccounts')NEWLINENEWLINE def load_time_difference(self, params={}):NEWLINE response = self.futuresPublicGetTimestamp(params)NEWLINE after = self.milliseconds()NEWLINE kucoinTime = self.safe_integer(response, 'data')NEWLINE self.options['timeDifference'] = int(after - kucoinTime)NEWLINE return self.options['timeDifference']NEWLINENEWLINE def fetch_status(self, params={}):NEWLINE response = self.futuresPublicGetStatus(params)NEWLINE #NEWLINE # {NEWLINE # "code":"200000",NEWLINE # "data":{NEWLINE # "msg":"",NEWLINE # "status":"open"NEWLINE # }NEWLINE # }NEWLINE #NEWLINE data = self.safe_value(response, 'data', {})NEWLINE status = self.safe_value(data, 'status')NEWLINE if status is not None:NEWLINE status = 'ok' if (status == 'open') else 'maintenance'NEWLINE self.status = 
self.extend(self.status, {NEWLINE 'status': status,NEWLINE 'updated': self.milliseconds(),NEWLINE })NEWLINE return self.statusNEWLINENEWLINE def fetch_markets(self, params={}):NEWLINE response = self.futuresPublicGetContractsActive(params)NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": {NEWLINE # "symbol": "ETHUSDTM",NEWLINE # "rootSymbol": "USDT",NEWLINE # "type": "FFWCSX",NEWLINE # "firstOpenDate": 1591086000000,NEWLINE # "expireDate": null,NEWLINE # "settleDate": null,NEWLINE # "baseCurrency": "ETH",NEWLINE # "quoteCurrency": "USDT",NEWLINE # "settleCurrency": "USDT",NEWLINE # "maxOrderQty": 1000000,NEWLINE # "maxPrice": 1000000.0000000000,NEWLINE # "lotSize": 1,NEWLINE # "tickSize": 0.05,NEWLINE # "indexPriceTickSize": 0.01,NEWLINE # "multiplier": 0.01,NEWLINE # "initialMargin": 0.01,NEWLINE # "maintainMargin": 0.005,NEWLINE # "maxRiskLimit": 1000000,NEWLINE # "minRiskLimit": 1000000,NEWLINE # "riskStep": 500000,NEWLINE # "makerFeeRate": 0.00020,NEWLINE # "takerFeeRate": 0.00060,NEWLINE # "takerFixFee": 0.0000000000,NEWLINE # "makerFixFee": 0.0000000000,NEWLINE # "settlementFee": null,NEWLINE # "isDeleverage": True,NEWLINE # "isQuanto": True,NEWLINE # "isInverse": False,NEWLINE # "markMethod": "FairPrice",NEWLINE # "fairMethod": "FundingRate",NEWLINE # "fundingBaseSymbol": ".ETHINT8H",NEWLINE # "fundingQuoteSymbol": ".USDTINT8H",NEWLINE # "fundingRateSymbol": ".ETHUSDTMFPI8H",NEWLINE # "indexSymbol": ".KETHUSDT",NEWLINE # "settlementSymbol": "",NEWLINE # "status": "Open",NEWLINE # "fundingFeeRate": 0.000535,NEWLINE # "predictedFundingFeeRate": 0.002197,NEWLINE # "openInterest": "8724443",NEWLINE # "turnoverOf24h": 341156641.03354263,NEWLINE # "volumeOf24h": 74833.54000000,NEWLINE # "markPrice": 4534.07,NEWLINE # "indexPrice":4531.92,NEWLINE # "lastTradePrice": 4545.4500000000,NEWLINE # "nextFundingRateTime": 25481884,NEWLINE # "maxLeverage": 100,NEWLINE # "sourceExchanges": [NEWLINE # "huobi",NEWLINE # "Okex",NEWLINE # "Binance",NEWLINE # 
"Kucoin",NEWLINE # "Poloniex",NEWLINE # "Hitbtc"NEWLINE # ],NEWLINE # "premiumsSymbol1M": ".ETHUSDTMPI",NEWLINE # "premiumsSymbol8H": ".ETHUSDTMPI8H",NEWLINE # "fundingBaseSymbol1M": ".ETHINT",NEWLINE # "fundingQuoteSymbol1M": ".USDTINT",NEWLINE # "lowPrice": 4456.90,NEWLINE # "highPrice": 4674.25,NEWLINE # "priceChgPct": 0.0046,NEWLINE # "priceChg": 21.15NEWLINE # }NEWLINE # }NEWLINE #NEWLINE result = []NEWLINE data = self.safe_value(response, 'data')NEWLINE for i in range(0, len(data)):NEWLINE market = data[i]NEWLINE id = self.safe_string(market, 'symbol')NEWLINE expiry = self.safe_integer(market, 'expireDate')NEWLINE futures = True if expiry else FalseNEWLINE swap = not futuresNEWLINE baseId = self.safe_string(market, 'baseCurrency')NEWLINE quoteId = self.safe_string(market, 'quoteCurrency')NEWLINE settleId = self.safe_string(market, 'settleCurrency')NEWLINE base = self.safe_currency_code(baseId)NEWLINE quote = self.safe_currency_code(quoteId)NEWLINE settle = self.safe_currency_code(settleId)NEWLINE symbol = base + '/' + quote + ':' + settleNEWLINE type = 'swap'NEWLINE if futures:NEWLINE symbol = symbol + '-' + self.yymmdd(expiry, '')NEWLINE type = 'futures'NEWLINE baseMaxSize = self.safe_number(market, 'baseMaxSize')NEWLINE baseMinSizeString = self.safe_string(market, 'baseMinSize')NEWLINE quoteMaxSizeString = self.safe_string(market, 'quoteMaxSize')NEWLINE baseMinSize = self.parse_number(baseMinSizeString)NEWLINE quoteMaxSize = self.parse_number(quoteMaxSizeString)NEWLINE quoteMinSize = self.safe_number(market, 'quoteMinSize')NEWLINE inverse = self.safe_value(market, 'isInverse')NEWLINE # quoteIncrement = self.safe_number(market, 'quoteIncrement')NEWLINE amount = self.safe_string(market, 'baseIncrement')NEWLINE price = self.safe_string(market, 'priceIncrement')NEWLINE result.append({NEWLINE 'id': id,NEWLINE 'symbol': symbol,NEWLINE 'baseId': baseId,NEWLINE 'quoteId': quoteId,NEWLINE 'settleId': settleId,NEWLINE 'base': base,NEWLINE 'quote': quote,NEWLINE 
'settle': settle,NEWLINE 'type': type,NEWLINE 'spot': False,NEWLINE 'margin': False,NEWLINE 'swap': swap,NEWLINE 'futures': futures,NEWLINE 'option': False,NEWLINE 'active': True,NEWLINE 'derivative': True,NEWLINE 'contract': True,NEWLINE 'linear': inverse is not True,NEWLINE 'inverse': inverse,NEWLINE 'taker': self.safe_number(market, 'takerFeeRate'),NEWLINE 'maker': self.safe_number(market, 'makerFeeRate'),NEWLINE 'contractSize': self.parse_number(Precise.string_abs(self.safe_string(market, 'multiplier'))),NEWLINE 'expiry': self.parse_number(expiry),NEWLINE 'expiryDatetime': self.iso8601(expiry),NEWLINE 'strike': None,NEWLINE 'optionType': None,NEWLINE 'precision': {NEWLINE 'amount': self.precision_from_string(amount) if amount else None,NEWLINE 'price': self.precision_from_string(price) if price else None,NEWLINE },NEWLINE 'limits': {NEWLINE 'leverage': {NEWLINE 'min': self.parse_number('1'),NEWLINE 'max': self.safe_number(market, 'maxLeverage', 1),NEWLINE },NEWLINE 'amount': {NEWLINE 'min': baseMinSize,NEWLINE 'max': baseMaxSize,NEWLINE },NEWLINE 'price': {NEWLINE 'min': price,NEWLINE 'max': self.parse_number(Precise.string_div(quoteMaxSizeString, baseMinSizeString)),NEWLINE },NEWLINE 'cost': {NEWLINE 'min': quoteMinSize,NEWLINE 'max': quoteMaxSize,NEWLINE },NEWLINE },NEWLINE 'info': market,NEWLINE })NEWLINE return resultNEWLINENEWLINE def fetch_time(self, params={}):NEWLINE response = self.futuresPublicGetTimestamp(params)NEWLINE #NEWLINE # {NEWLINE # code: "200000",NEWLINE # data: 1637385119302,NEWLINE # }NEWLINE #NEWLINE return self.safe_number(response, 'data')NEWLINENEWLINE def fetch_ohlcv(self, symbol, timeframe='15m', since=None, limit=None, params={}):NEWLINE self.load_markets()NEWLINE market = self.market(symbol)NEWLINE marketId = market['id']NEWLINE request = {NEWLINE 'symbol': marketId,NEWLINE 'granularity': self.timeframes[timeframe],NEWLINE }NEWLINE duration = self.parse_timeframe(timeframe) * 1000NEWLINE endAt = self.milliseconds()NEWLINE if since 
is not None:NEWLINE request['from'] = sinceNEWLINE if limit is None:NEWLINE limit = self.safe_integer(self.options, 'fetchOHLCVLimit', 200)NEWLINE endAt = self.sum(since, limit * duration)NEWLINE elif limit is not None:NEWLINE since = endAt - limit * durationNEWLINE request['from'] = sinceNEWLINE request['to'] = endAtNEWLINE response = self.futuresPublicGetKlineQuery(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": [NEWLINE # [1636459200000, 4779.3, 4792.1, 4768.7, 4770.3, 78051],NEWLINE # [1636460100000, 4770.25, 4778.55, 4757.55, 4777.25, 80164],NEWLINE # [1636461000000, 4777.25, 4791.45, 4774.5, 4791.3, 51555]NEWLINE # ]NEWLINE # }NEWLINE #NEWLINE data = self.safe_value(response, 'data', [])NEWLINE return self.parse_ohlcvs(data, market, timeframe, since, limit)NEWLINENEWLINE def parse_ohlcv(self, ohlcv, market=None):NEWLINE #NEWLINE # [NEWLINE # "1545904980000", # Start time of the candle cycleNEWLINE # "0.058", # opening priceNEWLINE # "0.049", # closing priceNEWLINE # "0.058", # highest priceNEWLINE # "0.049", # lowest priceNEWLINE # "0.018", # base volumeNEWLINE # "0.000945", # quote volumeNEWLINE # ]NEWLINE #NEWLINE return [NEWLINE self.safe_integer(ohlcv, 0),NEWLINE self.safe_number(ohlcv, 1),NEWLINE self.safe_number(ohlcv, 2),NEWLINE self.safe_number(ohlcv, 3),NEWLINE self.safe_number(ohlcv, 4),NEWLINE self.safe_number(ohlcv, 5),NEWLINE ]NEWLINENEWLINE def create_deposit_address(self, code, params={}):NEWLINE raise BadRequest(self.id + ' has no method createDepositAddress')NEWLINENEWLINE def fetch_deposit_address(self, code, params={}):NEWLINE self.load_markets()NEWLINE currency = self.currency(code)NEWLINE currencyId = currency['id']NEWLINE request = {NEWLINE 'currency': currencyId, # Currency,including XBT,USDTNEWLINE }NEWLINE response = self.futuresPrivateGetDepositAddress(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": {NEWLINE # "address": 
"0x78d3ad1c0aa1bf068e19c94a2d7b16c9c0fcd8b1",//Deposit addressNEWLINE # "memo": null//Address tag. If the returned value is null, it means that the requested token has no memo. If you are to transfer funds from another platform to KuCoin Futures and if the token to be #transferred has memo(tag), you need to fill in the memo to ensure the transferred funds will be sent #to the address you specified.NEWLINE # }NEWLINE # }NEWLINE #NEWLINE data = self.safe_value(response, 'data', {})NEWLINE address = self.safe_string(data, 'address')NEWLINE if currencyId != 'NIM':NEWLINE # contains spacesNEWLINE self.check_address(address)NEWLINE return {NEWLINE 'info': response,NEWLINE 'currency': currencyId,NEWLINE 'address': address,NEWLINE 'tag': self.safe_string(data, 'memo'),NEWLINE 'network': self.safe_string(data, 'chain'),NEWLINE }NEWLINENEWLINE def fetch_order_book(self, symbol, limit=None, params={}):NEWLINE self.load_markets()NEWLINE level = self.safe_number(params, 'level')NEWLINE if level != 2 and level is not None:NEWLINE raise BadRequest(self.id + ' fetchOrderBook can only return level 2')NEWLINE market = self.market(symbol)NEWLINE request = {NEWLINE 'symbol': market['id'],NEWLINE }NEWLINE if limit is not None:NEWLINE if (limit == 20) or (limit == 100):NEWLINE request['limit'] = limitNEWLINE else:NEWLINE raise BadRequest(self.id + ' fetchOrderBook limit argument must be 20 or 100')NEWLINE else:NEWLINE request['limit'] = 20NEWLINE response = self.futuresPublicGetLevel2DepthLimit(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": {NEWLINE # "symbol": "XBTUSDM", #SymbolNEWLINE # "sequence": 100, #Ticker sequence numberNEWLINE # "asks": [NEWLINE # ["5000.0", 1000], #Price, quantityNEWLINE # ["6000.0", 1983] #Price, quantityNEWLINE # ],NEWLINE # "bids": [NEWLINE # ["3200.0", 800], #Price, quantityNEWLINE # ["3100.0", 100] #Price, quantityNEWLINE # ],NEWLINE # "ts": 1604643655040584408 # timestampNEWLINE # }NEWLINE # }NEWLINE 
#NEWLINE data = self.safe_value(response, 'data', {})NEWLINE timestamp = int(self.safe_integer(data, 'ts') / 1000000)NEWLINE orderbook = self.parse_order_book(data, symbol, timestamp, 'bids', 'asks', 0, 1)NEWLINE orderbook['nonce'] = self.safe_integer(data, 'sequence')NEWLINE return orderbookNEWLINENEWLINE def fetch_l3_order_book(self, symbol, limit=None, params={}):NEWLINE raise BadRequest(self.id + ' only can only fetch the L2 order book')NEWLINENEWLINE def fetch_ticker(self, symbol, params={}):NEWLINE self.load_markets()NEWLINE market = self.market(symbol)NEWLINE request = {NEWLINE 'symbol': market['id'],NEWLINE }NEWLINE response = self.futuresPublicGetTicker(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": {NEWLINE # "sequence": 1638444978558,NEWLINE # "symbol": "ETHUSDTM",NEWLINE # "side": "sell",NEWLINE # "size": 4,NEWLINE # "price": "4229.35",NEWLINE # "bestBidSize": 2160,NEWLINE # "bestBidPrice": "4229.0",NEWLINE # "bestAskPrice": "4229.05",NEWLINE # "tradeId": "61aaa8b777a0c43055fe4851",NEWLINE # "ts": 1638574296209786785,NEWLINE # "bestAskSize": 36,NEWLINE # }NEWLINE # }NEWLINE #NEWLINE return self.parse_ticker(response['data'], market)NEWLINENEWLINE def parse_ticker(self, ticker, market=None):NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": {NEWLINE # "sequence": 1629930362547,NEWLINE # "symbol": "ETHUSDTM",NEWLINE # "side": "buy",NEWLINE # "size": 130,NEWLINE # "price": "4724.7",NEWLINE # "bestBidSize": 5,NEWLINE # "bestBidPrice": "4724.6",NEWLINE # "bestAskPrice": "4724.65",NEWLINE # "tradeId": "618d2a5a77a0c4431d2335f4",NEWLINE # "ts": 1636641371963227600,NEWLINE # "bestAskSize": 1789NEWLINE # }NEWLINE # }NEWLINE #NEWLINE last = self.safe_number(ticker, 'price')NEWLINE marketId = self.safe_string(ticker, 'symbol')NEWLINE market = self.safe_market(marketId, market, '-')NEWLINE timestamp = Precise.string_div(self.safe_string(ticker, 'ts'), '1000000')NEWLINE return self.safe_ticker({NEWLINE 
'symbol': market['symbol'],NEWLINE 'timestamp': timestamp,NEWLINE 'datetime': self.iso8601(timestamp),NEWLINE 'high': None,NEWLINE 'low': None,NEWLINE 'bid': self.safe_number(ticker, 'bestBidPrice'),NEWLINE 'bidVolume': self.safe_number(ticker, 'bestBidSize'),NEWLINE 'ask': self.safe_number(ticker, 'bestAskPrice'),NEWLINE 'askVolume': self.safe_number(ticker, 'bestAskSize'),NEWLINE 'vwap': None,NEWLINE 'open': None,NEWLINE 'close': last,NEWLINE 'last': last,NEWLINE 'previousClose': None,NEWLINE 'change': None,NEWLINE 'percentage': None,NEWLINE 'average': None,NEWLINE 'baseVolume': None,NEWLINE 'quoteVolume': None,NEWLINE 'info': ticker,NEWLINE }, market)NEWLINENEWLINE def fetch_funding_history(self, symbol=None, since=None, limit=None, params={}):NEWLINE #NEWLINE # PrivateNEWLINE # @param symbol(string): The pair for which the contract was tradedNEWLINE # @param since(number): The unix start time of the first funding payment requestedNEWLINE # @param limit(number): The number of results to returnNEWLINE # @param params(dict): Additional parameters to send to the APINEWLINE # @param return: Data for the history of the accounts funding payments for futures contractsNEWLINE #NEWLINE if symbol is None:NEWLINE raise ArgumentsRequired(self.id + ' fetchFundingHistory() requires a symbol argument')NEWLINE self.load_markets()NEWLINE market = self.market(symbol)NEWLINE request = {NEWLINE 'symbol': market['id'],NEWLINE }NEWLINE if since is not None:NEWLINE request['startAt'] = sinceNEWLINE if limit is not None:NEWLINE # * Since is ignored if limit is definedNEWLINE request['maxCount'] = limitNEWLINE response = self.futuresPrivateGetFundingHistory(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": {NEWLINE # "dataList": [NEWLINE # {NEWLINE # "id": 239471298749817,NEWLINE # "symbol": "ETHUSDTM",NEWLINE # "timePoint": 1638532800000,NEWLINE # "fundingRate": 0.000100,NEWLINE # "markPrice": 4612.8300000000,NEWLINE # "positionQty": 
12,NEWLINE # "positionCost": 553.5396000000,NEWLINE # "funding": -0.0553539600,NEWLINE # "settleCurrency": "USDT"NEWLINE # },NEWLINE # ...NEWLINE # ],NEWLINE # "hasMore": TrueNEWLINE # }NEWLINE # }NEWLINE #NEWLINE data = self.safe_value(response, 'data')NEWLINE dataList = self.safe_value(data, 'dataList')NEWLINE fees = []NEWLINE for i in range(0, len(dataList)):NEWLINE listItem = dataList[i]NEWLINE timestamp = self.safe_integer(listItem, 'timePoint')NEWLINE fees.append({NEWLINE 'info': listItem,NEWLINE 'symbol': symbol,NEWLINE 'code': self.safe_currency_code(self.safe_string(listItem, 'settleCurrency')),NEWLINE 'timestamp': timestamp,NEWLINE 'datetime': self.iso8601(timestamp),NEWLINE 'id': self.safe_number(listItem, 'id'),NEWLINE 'amount': self.safe_number(listItem, 'funding'),NEWLINE 'fundingRate': self.safe_number(listItem, 'fundingRate'),NEWLINE 'markPrice': self.safe_number(listItem, 'markPrice'),NEWLINE 'positionQty': self.safe_number(listItem, 'positionQty'),NEWLINE 'positionCost': self.safe_number(listItem, 'positionCost'),NEWLINE })NEWLINE return feesNEWLINENEWLINE def fetch_positions(self, symbols=None, params={}):NEWLINE response = self.futuresPrivateGetPositions(params)NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": [NEWLINE # {NEWLINE # "id": "615ba79f83a3410001cde321",NEWLINE # "symbol": "ETHUSDTM",NEWLINE # "autoDeposit": False,NEWLINE # "maintMarginReq": 0.005,NEWLINE # "riskLimit": 1000000,NEWLINE # "realLeverage": 18.61,NEWLINE # "crossMode": False,NEWLINE # "delevPercentage": 0.86,NEWLINE # "openingTimestamp": 1638563515618,NEWLINE # "currentTimestamp": 1638576872774,NEWLINE # "currentQty": 2,NEWLINE # "currentCost": 83.64200000,NEWLINE # "currentComm": 0.05018520,NEWLINE # "unrealisedCost": 83.64200000,NEWLINE # "realisedGrossCost": 0.00000000,NEWLINE # "realisedCost": 0.05018520,NEWLINE # "isOpen": True,NEWLINE # "markPrice": 4225.01,NEWLINE # "markValue": 84.50020000,NEWLINE # "posCost": 83.64200000,NEWLINE # "posCross": 
0.0000000000,NEWLINE # "posInit": 3.63660870,NEWLINE # "posComm": 0.05236717,NEWLINE # "posLoss": 0.00000000,NEWLINE # "posMargin": 3.68897586,NEWLINE # "posMaint": 0.50637594,NEWLINE # "maintMargin": 4.54717586,NEWLINE # "realisedGrossPnl": 0.00000000,NEWLINE # "realisedPnl": -0.05018520,NEWLINE # "unrealisedPnl": 0.85820000,NEWLINE # "unrealisedPnlPcnt": 0.0103,NEWLINE # "unrealisedRoePcnt": 0.2360,NEWLINE # "avgEntryPrice": 4182.10,NEWLINE # "liquidationPrice": 4023.00,NEWLINE # "bankruptPrice": 4000.25,NEWLINE # "settleCurrency": "USDT",NEWLINE # "isInverse": FalseNEWLINE # }NEWLINE # ]NEWLINE # }NEWLINE #NEWLINE return self.parse_positions(self.safe_value(response, 'data'))NEWLINENEWLINE def parse_positions(self, positions):NEWLINE result = []NEWLINE for i in range(0, len(positions)):NEWLINE result.append(self.parse_position(positions[i]))NEWLINE return resultNEWLINENEWLINE def parse_position(self, position, market=None):NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": [NEWLINE # {NEWLINE # "id": "615ba79f83a3410001cde321", # Position IDNEWLINE # "symbol": "ETHUSDTM", # SymbolNEWLINE # "autoDeposit": False, # Auto deposit margin or notNEWLINE # "maintMarginReq": 0.005, # Maintenance margin requirementNEWLINE # "riskLimit": 1000000, # Risk limitNEWLINE # "realLeverage": 25.92, # Leverage of the orderNEWLINE # "crossMode": False, # Cross mode or notNEWLINE # "delevPercentage": 0.76, # ADL ranking percentileNEWLINE # "openingTimestamp": 1638578546031, # Open timeNEWLINE # "currentTimestamp": 1638578563580, # Current timestampNEWLINE # "currentQty": 2, # Current postion quantityNEWLINE # "currentCost": 83.787, # Current postion valueNEWLINE # "currentComm": 0.0167574, # Current commissionNEWLINE # "unrealisedCost": 83.787, # Unrealised valueNEWLINE # "realisedGrossCost": 0.0, # Accumulated realised gross profit valueNEWLINE # "realisedCost": 0.0167574, # Current realised position valueNEWLINE # "isOpen": True, # Opened position or notNEWLINE # 
"markPrice": 4183.38, # Mark priceNEWLINE # "markValue": 83.6676, # Mark valueNEWLINE # "posCost": 83.787, # Position valueNEWLINE # "posCross": 0.0, # added marginNEWLINE # "posInit": 3.35148, # Leverage marginNEWLINE # "posComm": 0.05228309, # Bankruptcy costNEWLINE # "posLoss": 0.0, # Funding fees paid outNEWLINE # "posMargin": 3.40376309, # Position marginNEWLINE # "posMaint": 0.50707892, # Maintenance marginNEWLINE # "maintMargin": 3.28436309, # Position marginNEWLINE # "realisedGrossPnl": 0.0, # Accumulated realised gross profit valueNEWLINE # "realisedPnl": -0.0167574, # Realised profit and lossNEWLINE # "unrealisedPnl": -0.1194, # Unrealised profit and lossNEWLINE # "unrealisedPnlPcnt": -0.0014, # Profit-loss ratio of the positionNEWLINE # "unrealisedRoePcnt": -0.0356, # Rate of return on investmentNEWLINE # "avgEntryPrice": 4189.35, # Average entry priceNEWLINE # "liquidationPrice": 4044.55, # Liquidation priceNEWLINE # "bankruptPrice": 4021.75, # Bankruptcy priceNEWLINE # "settleCurrency": "USDT", # Currency used to clear and settle the tradesNEWLINE # "isInverse": FalseNEWLINE # }NEWLINE # ]NEWLINE # }NEWLINE #NEWLINE symbol = self.safe_string(position, 'symbol')NEWLINE market = self.safe_market(symbol, market)NEWLINE timestamp = self.safe_number(position, 'currentTimestamp')NEWLINE size = self.safe_string(position, 'currentQty')NEWLINE side = NoneNEWLINE if Precise.string_gt(size, '0'):NEWLINE side = 'buy'NEWLINE elif Precise.string_lt(size, '0'):NEWLINE side = 'sell'NEWLINE notional = Precise.string_abs(self.safe_string(position, 'posCost'))NEWLINE initialMargin = self.safe_string(position, 'posMargin')NEWLINE initialMarginPercentage = Precise.string_div(initialMargin, notional)NEWLINE leverage = Precise.string_div('1', initialMarginPercentage) # TODO: Not quite rightNEWLINE # marginRatio = Precise.string_div(maintenanceRate, collateral)NEWLINE unrealisedPnl = self.safe_string(position, 'unrealisedPnl')NEWLINE return {NEWLINE 'info': position,NEWLINE 
'symbol': self.safe_string(market, 'symbol'),NEWLINE 'timestamp': timestamp,NEWLINE 'datetime': self.iso8601(timestamp),NEWLINE 'initialMargin': self.parse_number(initialMargin),NEWLINE 'initialMarginPercentage': self.parse_number(initialMarginPercentage),NEWLINE 'maintenanceMargin': self.safe_number(position, 'maintMargin'),NEWLINE 'maintenanceMarginPercentage': self.safe_string(position, 'maintMarginReq'),NEWLINE 'entryPrice': self.safe_number(position, 'avgEntryPrice'),NEWLINE 'notional': self.parse_number(notional),NEWLINE 'leverage': self.parse_number(leverage),NEWLINE 'unrealizedPnl': self.parse_number(unrealisedPnl),NEWLINE 'contracts': self.parse_number(Precise.string_abs(size)),NEWLINE 'contractSize': self.safe_number(market, 'contractSize'),NEWLINE # realisedPnl: position['realised_pnl'],NEWLINE 'marginRatio': None,NEWLINE 'liquidationPrice': self.safe_number(position, 'liquidationPrice'),NEWLINE 'markPrice': self.safe_number(position, 'markPrice'),NEWLINE 'collateral': self.safe_number(position, 'posInit'),NEWLINE 'marginType': None,NEWLINE 'side': side,NEWLINE 'percentage': self.parse_number(Precise.string_div(unrealisedPnl, initialMargin)),NEWLINE }NEWLINENEWLINE def create_order(self, symbol, type, side, amount, price=None, params={}):NEWLINE self.load_markets()NEWLINE market = self.market(symbol)NEWLINE # required param, cannot be used twiceNEWLINE clientOrderId = self.safe_string_2(params, 'clientOid', 'clientOrderId', self.uuid())NEWLINE params = self.omit(params, ['clientOid', 'clientOrderId'])NEWLINE leverage = self.safe_number(params, 'leverage')NEWLINE if not leverage:NEWLINE raise ArgumentsRequired(self.id + ' createOrder requires params.leverage')NEWLINE if amount < 1:NEWLINE raise InvalidOrder('Minimum contract order size using ' + self.id + ' is 1')NEWLINE preciseAmount = int(self.amount_to_precision(symbol, amount))NEWLINE request = {NEWLINE 'clientOid': clientOrderId,NEWLINE 'side': side,NEWLINE 'symbol': market['id'],NEWLINE 'type': 
type, # limit or marketNEWLINE 'size': preciseAmount,NEWLINE # 'remark': '', # optional remark for the order, length cannot exceed 100 utf8 charactersNEWLINE # 'tradeType': 'TRADE', # TRADE, MARGIN_TRADE # not used with margin ordersNEWLINE # limit orders ---------------------------------------------------NEWLINE # 'timeInForce': 'GTC', # GTC, GTT, IOC, or FOK(default is GTC), limit orders onlyNEWLINE # 'cancelAfter': long, # cancel after n seconds, requires timeInForce to be GTTNEWLINE # 'postOnly': False, # Post only flag, invalid when timeInForce is IOC or FOKNEWLINE # 'hidden': False, # Order will not be displayed in the order bookNEWLINE # 'iceberg': False, # Only a portion of the order is displayed in the order bookNEWLINE # 'visibleSize': self.amount_to_precision(symbol, visibleSize), # The maximum visible size of an iceberg orderNEWLINE # market orders --------------------------------------------------NEWLINE # 'funds': self.cost_to_precision(symbol, cost), # Amount of quote currency to useNEWLINE # stop orders ----------------------------------------------------NEWLINE # 'stop': 'loss', # loss or entry, the default is loss, requires stopPriceNEWLINE # 'stopPrice': self.price_to_precision(symbol, amount), # need to be defined if stop is specifiedNEWLINE # 'stopPriceType' # Either TP, IP or MP, Need to be defined if stop is specified.NEWLINE # margin orders --------------------------------------------------NEWLINE # 'marginMode': 'cross', # cross(cross mode) and isolated(isolated mode), set to cross by default, the isolated mode will be released soon, stay tunedNEWLINE # 'autoBorrow': False, # The system will first borrow you funds at the optimal interest rate and then place an order for youNEWLINE # futures orders -------------------------------------------------NEWLINE # reduceOnly #(boolean) A mark to reduce the position size only. Set to False by default. 
Need to set the position size when reduceOnly is True.NEWLINE # closeOrder #(boolean) A mark to close the position. Set to False by default. It will close all the positions when closeOrder is True.NEWLINE # forceHold #(boolean) A mark to forcely hold the funds for an order, even though it's an order to reduce the position size. This helps the order stay on the order book and not get canceled when the position size changes. Set to False by default.NEWLINE }NEWLINE stopPrice = self.safe_number(params, 'stopPrice')NEWLINE if stopPrice:NEWLINE request['stop'] = side.upper() == 'down' if 'BUY' else 'up'NEWLINE stopPriceType = self.safe_string(params, 'stopPriceType')NEWLINE if not stopPriceType:NEWLINE raise ArgumentsRequired(self.id + ' trigger orders require params.stopPriceType to be set to TP, IP or MP(Trade Price, Index Price or Mark Price)')NEWLINE uppercaseType = type.upper()NEWLINE timeInForce = self.safe_string(params, 'timeInForce')NEWLINE if uppercaseType == 'LIMIT':NEWLINE if price is None:NEWLINE raise ArgumentsRequired(self.id + ' limit orders require the price argument')NEWLINE else:NEWLINE request['price'] = self.price_to_precision(symbol, price)NEWLINE if timeInForce is not None:NEWLINE timeInForce = timeInForce.upper()NEWLINE request['timeInForce'] = timeInForceNEWLINE postOnly = self.safe_value(params, 'postOnly', False)NEWLINE hidden = self.safe_value(params, 'hidden')NEWLINE if postOnly and hidden is not None:NEWLINE raise BadRequest(self.id + ' createOrder cannot contain both params.postOnly and params.hidden')NEWLINE iceberg = self.safe_value(params, 'iceberg')NEWLINE if iceberg:NEWLINE visibleSize = self.safe_value(params, 'visibleSize')NEWLINE if visibleSize is None:NEWLINE raise ArgumentsRequired(self.id + ' requires params.visibleSize for iceberg orders')NEWLINE params = self.omit(params, 'timeInForce') # Time in force only valid for limit orders, exchange error when gtc for market ordersNEWLINE response = 
self.futuresPrivatePostOrders(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # code: "200000",NEWLINE # data: {NEWLINE # orderId: "619717484f1d010001510cde",NEWLINE # },NEWLINE # }NEWLINE #NEWLINE data = self.safe_value(response, 'data', {})NEWLINE return {NEWLINE 'id': self.safe_string(data, 'orderId'),NEWLINE 'clientOrderId': clientOrderId,NEWLINE 'timestamp': None,NEWLINE 'datetime': None,NEWLINE 'lastTradeTimestamp': None,NEWLINE 'symbol': symbol,NEWLINE 'type': type,NEWLINE 'side': side,NEWLINE 'price': price,NEWLINE 'amount': preciseAmount,NEWLINE 'cost': None,NEWLINE 'average': None,NEWLINE 'filled': None,NEWLINE 'remaining': None,NEWLINE 'status': None,NEWLINE 'fee': None,NEWLINE 'trades': None,NEWLINE 'timeInForce': timeInForce,NEWLINE 'postOnly': postOnly,NEWLINE 'stopPrice': stopPrice,NEWLINE 'info': data,NEWLINE }NEWLINENEWLINE def cancel_order(self, id, symbol=None, params={}):NEWLINE self.load_markets()NEWLINE request = {NEWLINE 'orderId': id,NEWLINE }NEWLINE response = self.futuresPrivateDeleteOrdersOrderId(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # code: "200000",NEWLINE # data: {NEWLINE # cancelledOrderIds: [NEWLINE # "619714b8b6353000014c505a",NEWLINE # ],NEWLINE # },NEWLINE # }NEWLINE #NEWLINE return self.safe_value(response, 'data')NEWLINENEWLINE def cancel_all_orders(self, symbol=None, params={}):NEWLINE self.load_markets()NEWLINE request = {}NEWLINE if symbol is not None:NEWLINE request['symbol'] = self.market_id(symbol)NEWLINE response = self.futuresPrivateDeleteOrders(self.extend(request, params))NEWLINE # ? 
futuresPrivateDeleteStopOrdersNEWLINE # {NEWLINE # code: "200000",NEWLINE # data: {NEWLINE # cancelledOrderIds: [NEWLINE # "619714b8b6353000014c505a",NEWLINE # ],NEWLINE # },NEWLINE # }NEWLINE #NEWLINE return self.safe_value(response, 'data')NEWLINENEWLINE def fetch_orders_by_status(self, status, symbol=None, since=None, limit=None, params={}):NEWLINE self.load_markets()NEWLINE request = {NEWLINE 'status': status,NEWLINE }NEWLINE market = NoneNEWLINE if symbol is not None:NEWLINE market = self.market(symbol)NEWLINE request['symbol'] = market['id']NEWLINE if since is not None:NEWLINE request['startAt'] = sinceNEWLINE response = self.futuresPrivateGetOrders(self.extend(request, params))NEWLINE responseData = self.safe_value(response, 'data', {})NEWLINE orders = self.safe_value(responseData, 'items', [])NEWLINE return self.parse_orders(orders, market, since, limit)NEWLINENEWLINE def fetch_order(self, id=None, symbol=None, params={}):NEWLINE self.load_markets()NEWLINE request = {}NEWLINE method = 'futuresPrivateGetOrdersOrderId'NEWLINE if id is None:NEWLINE clientOrderId = self.safe_string_2(params, 'clientOid', 'clientOrderId')NEWLINE if clientOrderId is None:NEWLINE raise InvalidOrder(self.id + ' fetchOrder() requires parameter id or params.clientOid')NEWLINE request['clientOid'] = clientOrderIdNEWLINE method = 'futuresPrivateGetOrdersByClientOid'NEWLINE params = self.omit(params, ['clientOid', 'clientOrderId'])NEWLINE else:NEWLINE request['orderId'] = idNEWLINE response = getattr(self, method)(self.extend(request, params))NEWLINE market = symbol is not self.market(symbol) if None else NoneNEWLINE responseData = self.safe_value(response, 'data')NEWLINE return self.parse_order(responseData, market)NEWLINENEWLINE def parse_order(self, order, market=None):NEWLINE marketId = self.safe_string(order, 'symbol')NEWLINE symbol = self.safe_symbol(marketId, market, '-')NEWLINE orderId = self.safe_string(order, 'id')NEWLINE type = self.safe_string(order, 'type')NEWLINE timestamp 
= self.safe_integer(order, 'createdAt')NEWLINE datetime = self.iso8601(timestamp)NEWLINE price = self.safe_string(order, 'price')NEWLINE # price is zero for market orderNEWLINE # omitZero is called in safeOrder2NEWLINE side = self.safe_string(order, 'side')NEWLINE feeCurrencyId = self.safe_string(order, 'feeCurrency')NEWLINE feeCurrency = self.safe_currency_code(feeCurrencyId)NEWLINE feeCost = self.safe_number(order, 'fee')NEWLINE amount = self.safe_string(order, 'size')NEWLINE filled = self.safe_string(order, 'dealSize')NEWLINE cost = self.safe_string(order, 'dealFunds')NEWLINE # boolNEWLINE isActive = self.safe_value(order, 'isActive', False)NEWLINE cancelExist = self.safe_value(order, 'cancelExist', False)NEWLINE status = 'open' if isActive else 'closed'NEWLINE status = 'canceled' if cancelExist else statusNEWLINE fee = {NEWLINE 'currency': feeCurrency,NEWLINE 'cost': feeCost,NEWLINE }NEWLINE clientOrderId = self.safe_string(order, 'clientOid')NEWLINE timeInForce = self.safe_string(order, 'timeInForce')NEWLINE stopPrice = self.safe_number(order, 'stopPrice')NEWLINE postOnly = self.safe_value(order, 'postOnly')NEWLINE return self.safeOrder2({NEWLINE 'id': orderId,NEWLINE 'clientOrderId': clientOrderId,NEWLINE 'symbol': symbol,NEWLINE 'type': type,NEWLINE 'timeInForce': timeInForce,NEWLINE 'postOnly': postOnly,NEWLINE 'side': side,NEWLINE 'amount': amount,NEWLINE 'price': price,NEWLINE 'stopPrice': stopPrice,NEWLINE 'cost': cost,NEWLINE 'filled': filled,NEWLINE 'remaining': None,NEWLINE 'timestamp': timestamp,NEWLINE 'datetime': datetime,NEWLINE 'fee': fee,NEWLINE 'status': status,NEWLINE 'info': order,NEWLINE 'lastTradeTimestamp': None,NEWLINE 'average': None,NEWLINE 'trades': None,NEWLINE }, market)NEWLINENEWLINE def fetch_funding_rate(self, symbol, params={}):NEWLINE self.load_markets()NEWLINE request = {NEWLINE 'symbol': self.market_id(symbol),NEWLINE }NEWLINE response = self.futuresPublicGetFundingRateSymbolCurrent(self.extend(request, params))NEWLINE 
#NEWLINE # {NEWLINE # code: "200000",NEWLINE # data: {NEWLINE # symbol: ".ETHUSDTMFPI8H",NEWLINE # granularity: 28800000,NEWLINE # timePoint: 1637380800000,NEWLINE # value: 0.0001,NEWLINE # predictedValue: 0.0001,NEWLINE # },NEWLINE # }NEWLINE #NEWLINE data = self.safe_value(response, 'data')NEWLINE timestamp = self.safe_number(data, 'timePoint')NEWLINE return {NEWLINE 'info': data,NEWLINE 'symbol': symbol,NEWLINE 'markPrice': None,NEWLINE 'indexPrice': None,NEWLINE 'interestRate': None,NEWLINE 'estimatedSettlePrice': None,NEWLINE 'timestamp': None,NEWLINE 'datetime': None,NEWLINE 'previousFundingRate': self.safe_number(data, 'value'),NEWLINE 'nextFundingRate': self.safe_number(data, 'predictedValue'),NEWLINE 'previousFundingTimestamp': timestamp,NEWLINE 'nextFundingTimestamp': None,NEWLINE 'previousFundingDatetime': self.iso8601(timestamp),NEWLINE 'nextFundingDatetime': None,NEWLINE }NEWLINENEWLINE def fetch_balance(self, params={}):NEWLINE self.load_markets()NEWLINE # only fetches one balance at a timeNEWLINE # by default it will only fetch the BTC balance of the futures accountNEWLINE # you can send 'currency' in params to fetch other currenciesNEWLINE # fetchBalance({'type': 'futures', 'currency': 'USDT'})NEWLINE response = self.futuresPrivateGetAccountOverview(params)NEWLINE #NEWLINE # {NEWLINE # code: '200000',NEWLINE # data: {NEWLINE # accountEquity: 0.00005,NEWLINE # unrealisedPNL: 0,NEWLINE # marginBalance: 0.00005,NEWLINE # positionMargin: 0,NEWLINE # orderMargin: 0,NEWLINE # frozenFunds: 0,NEWLINE # availableBalance: 0.00005,NEWLINE # currency: 'XBT'NEWLINE # }NEWLINE # }NEWLINE #NEWLINE result = {NEWLINE 'info': response,NEWLINE 'timestamp': None,NEWLINE 'datetime': None,NEWLINE }NEWLINE data = self.safe_value(response, 'data')NEWLINE currencyId = self.safe_string(data, 'currency')NEWLINE code = self.safe_currency_code(currencyId)NEWLINE account = self.account()NEWLINE account['free'] = self.safe_string(data, 'availableBalance')NEWLINE account['total'] 
= self.safe_string(data, 'accountEquity')NEWLINE result[code] = accountNEWLINE return self.parse_balance(result)NEWLINENEWLINE def transfer(self, code, amount, fromAccount, toAccount, params={}):NEWLINE if (toAccount != 'spot' and toAccount != 'trade' and toAccount != 'trading') or (fromAccount != 'futures' and fromAccount != 'contract'):NEWLINE raise BadRequest(self.id + ' only supports transfers from contract(futures) account to trade(spot) account')NEWLINE return self.transfer_out(code, amount, params)NEWLINENEWLINE def transfer_out(self, code, amount, params={}):NEWLINE self.load_markets()NEWLINE currency = self.currency(code)NEWLINE request = {NEWLINE 'currency': self.safe_string(currency, 'id'), # Currency,including XBT,USDTNEWLINE 'amount': amount,NEWLINE }NEWLINE # transfer from usdm futures wallet to spot walletNEWLINE response = self.futuresPrivatePostTransferOut(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": {NEWLINE # "applyId": "5bffb63303aa675e8bbe18f9" # Transfer-out request IDNEWLINE # }NEWLINE # }NEWLINE #NEWLINE data = self.safe_value(response, 'data')NEWLINE timestamp = self.safe_string(data, 'updatedAt')NEWLINE return {NEWLINE 'info': response,NEWLINE 'id': self.safe_string(data, 'applyId'),NEWLINE 'timestamp': timestamp,NEWLINE 'datetime': self.iso8601(timestamp),NEWLINE 'currency': code,NEWLINE 'amount': amount,NEWLINE 'fromAccount': 'futures',NEWLINE 'toAccount': 'spot',NEWLINE 'status': self.safe_string(data, 'status'),NEWLINE }NEWLINENEWLINE def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):NEWLINE self.load_markets()NEWLINE request = {NEWLINE # orderId(String) [optional] Fills for a specific order(other parameters can be ignored if specified)NEWLINE # symbol(String) [optional] Symbol of the contractNEWLINE # side(String) [optional] buy or sellNEWLINE # type(String) [optional] limit, market, limit_stop or market_stopNEWLINE # startAt(long) [optional] Start 
time(milisecond)NEWLINE # endAt(long) [optional] End time(milisecond)NEWLINE }NEWLINE market = NoneNEWLINE if symbol is not None:NEWLINE market = self.market(symbol)NEWLINE request['symbol'] = market['id']NEWLINE if since is not None:NEWLINE request['startAt'] = sinceNEWLINE response = self.futuresPrivateGetFills(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": {NEWLINE # "currentPage": 1,NEWLINE # "pageSize": 1,NEWLINE # "totalNum": 251915,NEWLINE # "totalPage": 251915,NEWLINE # "items": [NEWLINE # {NEWLINE # "symbol": "XBTUSDM", # Ticker symbol of the contractNEWLINE # "tradeId": "5ce24c1f0c19fc3c58edc47c", # Trade IDNEWLINE # "orderId": "5ce24c16b210233c36ee321d", # Order IDNEWLINE # "side": "sell", # Transaction sideNEWLINE # "liquidity": "taker", # Liquidity- taker or makerNEWLINE # "price": "8302", # Filled priceNEWLINE # "size": 10, # Filled amountNEWLINE # "value": "0.001204529", # Order valueNEWLINE # "feeRate": "0.0005", # Floating feesNEWLINE # "fixFee": "0.00000006", # Fixed feesNEWLINE # "feeCurrency": "XBT", # Charging currencyNEWLINE # "stop": "", # A mark to the stop order typeNEWLINE # "fee": "0.0000012022", # Transaction feeNEWLINE # "orderType": "limit", # Order typeNEWLINE # "tradeType": "trade", # Trade type(trade, liquidation, ADL or settlement)NEWLINE # "createdAt": 1558334496000, # Time the order createdNEWLINE # "settleCurrency": "XBT", # settlement currencyNEWLINE # "tradeTime": 1558334496000000000 # trade time in nanosecondNEWLINE # }NEWLINE # ]NEWLINE # }NEWLINE # }NEWLINE #NEWLINE data = self.safe_value(response, 'data', {})NEWLINE trades = self.safe_value(data, 'items', {})NEWLINE return self.parse_trades(trades, market, since, limit)NEWLINENEWLINE def fetch_trades(self, symbol, since=None, limit=None, params={}):NEWLINE self.load_markets()NEWLINE market = self.market(symbol)NEWLINE request = {NEWLINE 'symbol': market['id'],NEWLINE }NEWLINE response = 
self.futuresPublicGetTradeHistory(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # "code": "200000",NEWLINE # "data": [NEWLINE # {NEWLINE # "sequence": 32114961,NEWLINE # "side": "buy",NEWLINE # "size": 39,NEWLINE # "price": "4001.6500000000",NEWLINE # "takerOrderId": "61c20742f172110001e0ebe4",NEWLINE # "makerOrderId": "61c2073fcfc88100010fcb5d",NEWLINE # "tradeId": "61c2074277a0c473e69029b8",NEWLINE # "ts": 1640105794099993896 # filled timeNEWLINE # }NEWLINE # ]NEWLINE # }NEWLINE #NEWLINE trades = self.safe_value(response, 'data', [])NEWLINE return self.parse_trades(trades, market, since, limit)NEWLINENEWLINE def parse_trade(self, trade, market=None):NEWLINE #NEWLINE # fetchTrades(public)NEWLINE #NEWLINE # {NEWLINE # "sequence": 32114961,NEWLINE # "side": "buy",NEWLINE # "size": 39,NEWLINE # "price": "4001.6500000000",NEWLINE # "takerOrderId": "61c20742f172110001e0ebe4",NEWLINE # "makerOrderId": "61c2073fcfc88100010fcb5d",NEWLINE # "tradeId": "61c2074277a0c473e69029b8",NEWLINE # "ts": 1640105794099993896 # filled timeNEWLINE # }NEWLINE #NEWLINE # fetchMyTrades(private) v2NEWLINE #NEWLINE # {NEWLINE # "symbol":"BTC-USDT",NEWLINE # "tradeId":"5c35c02709e4f67d5266954e",NEWLINE # "orderId":"5c35c02703aa673ceec2a168",NEWLINE # "counterOrderId":"5c1ab46003aa676e487fa8e3",NEWLINE # "side":"buy",NEWLINE # "liquidity":"taker",NEWLINE # "forceTaker":true,NEWLINE # "price":"0.083",NEWLINE # "size":"0.8424304",NEWLINE # "funds":"0.0699217232",NEWLINE # "fee":"0",NEWLINE # "feeRate":"0",NEWLINE # "feeCurrency":"USDT",NEWLINE # "stop":"",NEWLINE # "type":"limit",NEWLINE # "createdAt":1547026472000NEWLINE # }NEWLINE #NEWLINE marketId = self.safe_string(trade, 'symbol')NEWLINE symbol = self.safe_symbol(marketId, market, '-')NEWLINE id = self.safe_string_2(trade, 'tradeId', 'id')NEWLINE orderId = self.safe_string(trade, 'orderId')NEWLINE takerOrMaker = self.safe_string(trade, 'liquidity')NEWLINE timestamp = self.safe_integer(trade, 'time')NEWLINE if timestamp is not 
None:NEWLINE timestamp = int(timestamp / 1000000)NEWLINE else:NEWLINE timestamp = self.safe_integer(trade, 'createdAt')NEWLINE # if it's a historical v1 trade, the exchange returns timestamp in secondsNEWLINE if ('dealValue' in trade) and (timestamp is not None):NEWLINE timestamp = timestamp * 1000NEWLINE priceString = self.safe_string_2(trade, 'price', 'dealPrice')NEWLINE amountString = self.safe_string_2(trade, 'size', 'amount')NEWLINE price = self.parse_number(priceString)NEWLINE amount = self.parse_number(amountString)NEWLINE side = self.safe_string(trade, 'side')NEWLINE fee = NoneNEWLINE feeCost = self.safe_number(trade, 'fee')NEWLINE if feeCost is not None:NEWLINE feeCurrencyId = self.safe_string(trade, 'feeCurrency')NEWLINE feeCurrency = self.safe_currency_code(feeCurrencyId)NEWLINE if feeCurrency is None:NEWLINE if market is not None:NEWLINE feeCurrency = market['quote'] if (side == 'sell') else market['base']NEWLINE fee = {NEWLINE 'cost': feeCost,NEWLINE 'currency': feeCurrency,NEWLINE 'rate': self.safe_number(trade, 'feeRate'),NEWLINE }NEWLINE type = self.safe_string_2(trade, 'type', 'orderType')NEWLINE if type == 'match':NEWLINE type = NoneNEWLINE cost = self.safe_number_2(trade, 'funds', 'dealValue')NEWLINE if cost is None:NEWLINE market = self.market(symbol)NEWLINE contractSize = self.safe_string(market, 'contractSize')NEWLINE contractCost = Precise.string_mul(priceString, amountString)NEWLINE if contractSize and contractCost:NEWLINE cost = self.parse_number(Precise.string_mul(contractCost, contractSize))NEWLINE return {NEWLINE 'info': trade,NEWLINE 'id': id,NEWLINE 'order': orderId,NEWLINE 'timestamp': timestamp,NEWLINE 'datetime': self.iso8601(timestamp),NEWLINE 'symbol': symbol,NEWLINE 'type': type,NEWLINE 'takerOrMaker': takerOrMaker,NEWLINE 'side': side,NEWLINE 'price': price,NEWLINE 'amount': amount,NEWLINE 'cost': cost,NEWLINE 'fee': fee,NEWLINE }NEWLINENEWLINE def fetch_deposits(self, code=None, since=None, limit=None, params={}):NEWLINE 
self.load_markets()NEWLINE request = {}NEWLINE currency = NoneNEWLINE if code is not None:NEWLINE currency = self.currency(code)NEWLINE request['currency'] = currency['id']NEWLINE if limit is not None:NEWLINE request['pageSize'] = limitNEWLINE if since is not None:NEWLINE request['startAt'] = sinceNEWLINE response = self.futuresPrivateGetDepositList(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # code: '200000',NEWLINE # data: {NEWLINE # "currentPage": 1,NEWLINE # "pageSize": 5,NEWLINE # "totalNum": 2,NEWLINE # "totalPage": 1,NEWLINE # "items": [NEWLINE # {NEWLINE # "address": "0x5f047b29041bcfdbf0e4478cdfa753a336ba6989",NEWLINE # "memo": "5c247c8a03aa677cea2a251d",NEWLINE # "amount": 1,NEWLINE # "fee": 0.0001,NEWLINE # "currency": "KCS",NEWLINE # "isInner": False,NEWLINE # "walletTxId": "5bbb57386d99522d9f954c5a@test004",NEWLINE # "status": "SUCCESS",NEWLINE # "createdAt": 1544178843000,NEWLINE # "updatedAt": 1544178891000NEWLINE # "remark":"foobar"NEWLINE # },NEWLINE # ...NEWLINE # ]NEWLINE # }NEWLINE # }NEWLINE #NEWLINE responseData = response['data']['items']NEWLINE return self.parse_transactions(responseData, currency, since, limit, {'type': 'deposit'})NEWLINENEWLINE def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):NEWLINE self.load_markets()NEWLINE request = {}NEWLINE currency = NoneNEWLINE if code is not None:NEWLINE currency = self.currency(code)NEWLINE request['currency'] = currency['id']NEWLINE if limit is not None:NEWLINE request['pageSize'] = limitNEWLINE if since is not None:NEWLINE request['startAt'] = sinceNEWLINE response = self.futuresPrivateGetWithdrawalList(self.extend(request, params))NEWLINE #NEWLINE # {NEWLINE # code: '200000',NEWLINE # data: {NEWLINE # "currentPage": 1,NEWLINE # "pageSize": 5,NEWLINE # "totalNum": 2,NEWLINE # "totalPage": 1,NEWLINE # "items": [NEWLINE # {NEWLINE # "id": "5c2dc64e03aa675aa263f1ac",NEWLINE # "address": "0x5bedb060b8eb8d823e2414d82acce78d38be7fe9",NEWLINE # "memo": "",NEWLINE 
# "currency": "ETH",NEWLINE # "amount": 1.0000000,NEWLINE # "fee": 0.0100000,NEWLINE # "walletTxId": "3e2414d82acce78d38be7fe9",NEWLINE # "isInner": False,NEWLINE # "status": "FAILURE",NEWLINE # "createdAt": 1546503758000,NEWLINE # "updatedAt": 1546504603000NEWLINE # },NEWLINE # ...NEWLINE # ]NEWLINE # }NEWLINE # }NEWLINE #NEWLINE responseData = response['data']['items']NEWLINE return self.parse_transactions(responseData, currency, since, limit, {'type': 'withdrawal'})NEWLINENEWLINE def fetch_funding_fee(self, code, params={}):NEWLINE raise BadRequest(self.id + ' has no method fetchFundingFee')NEWLINENEWLINE def fetch_ledger(self, code=None, since=None, limit=None, params={}):NEWLINE raise BadRequest(self.id + ' has no method fetchLedger')NEWLINE
import torch
import torch.nn as nn
import torch.nn.functional as F
from mmcv.cnn import normal_init, kaiming_init
import numpy as np

from mmdet.ops import ModulatedDeformConvPack, RoIAlign, soft_nms
from mmdet.core import multi_apply, bbox_areas, force_fp32
from mmdet.core.utils.summary import write_txt
from mmdet.core.anchor.guided_anchor_target import calc_region
from mmdet.models.losses import ct_focal_loss, giou_loss, diou_loss, ciou_loss
from mmdet.models.utils import (build_norm_layer, bias_init_with_prob, ConvModule,
                                simple_nms, build_conv_layer, SEBlock)
from .anchor_head import AnchorHead
from ..registry import HEADS


@HEADS.register_module
class TTFHead(AnchorHead):
    # Detection head producing a per-class heatmap ('hm') and a box-regression
    # map ('wh') from multi-scale backbone features, upsampled with deformable
    # (or plain/multi-scale) convs plus shortcut connections.

    def __init__(self,
                 inplanes=(64, 128, 256, 512),
                 planes=(256, 128, 64),
                 down_ratio=4,
                 head_conv=256,
                 wh_conv=64,
                 hm_head_conv_num=2,
                 wh_head_conv_num=2,
                 num_classes=81,
                 shortcut_kernel=3,
                 conv_cfg=None,
                 head_conv_size=3,
                 use_trident=False,
                 use_dla=False,
                 wh_sym=False,
                 upsample_vanilla_conv=False,
                 upsample_multiscale_conv=False,
                 up_conv_cfg=None,
                 norm_cfg=dict(type='BN'),
                 shortcut_cfg=(1, 2, 3),
                 wh_offset_base=16.,
                 wh_area_process='log',
                 wh_agnostic=True,
                 wh_gaussian=True,
                 box_size_range=None,
                 two_stage=False,
                 alpha=0.54,
                 beta=0.54,
                 hm_weight=1.,
                 dcn_mean=False,
                 iou_type='giou',
                 use_simple_nms=True,
                 aug_reg=False,
                 hm_last_3x3=False,
                 hm_last_3x3_d2=False,
                 hm_last_se3x3=False,
                 hm_last_5x5=False,
                 hm_last_7x7=False,
                 no_wh_se=False,
                 wh_weight=5.,
                 max_objs=128):
        # NOTE: intentionally skips AnchorHead.__init__ (anchor machinery unused)
        # and initializes nn.Module directly.
        super(AnchorHead, self).__init__()
        assert len(planes) in [2, 3, 4]
        # one shortcut per upsampling stage that has a matching backbone level
        shortcut_num = min(len(inplanes) - 1, len(planes))
        assert shortcut_num == len(shortcut_cfg)
        assert wh_area_process in [None, 'norm', 'log', 'sqrt']

        self.planes = planes
        self.head_conv = head_conv
        self.num_classes = num_classes
        self.conv_cfg = conv_cfg
        self.head_conv_size = head_conv_size
        self.use_trident = use_trident
        self.use_dla = use_dla
        self.wh_sym = wh_sym
        self.upsample_vanilla_conv = upsample_vanilla_conv
        self.upsample_multiscale_conv = upsample_multiscale_conv
        self.up_conv_cfg = up_conv_cfg
        self.wh_offset_base = wh_offset_base
        self.wh_area_process = wh_area_process
        self.wh_agnostic = wh_agnostic
        self.wh_gaussian = wh_gaussian
        self.box_size_range = box_size_range
        self.two_stage = two_stage
        self.alpha = alpha
        self.beta = beta
        self.hm_weight = hm_weight
        self.dcn_mean = dcn_mean
        # resolves 'giou'/'diou'/'ciou' to the imported *_loss function
        self.iou_loss = eval(iou_type + '_loss')
        self.use_simple_nms = use_simple_nms
        self.aug_reg = aug_reg
        self.hm_last_3x3 = hm_last_3x3
        self.hm_last_3x3_d2 = hm_last_3x3_d2
        self.hm_last_se3x3 = hm_last_se3x3
        self.no_wh_se = no_wh_se
        self.hm_last_5x5 = hm_last_5x5
        self.hm_last_7x7 = hm_last_7x7
        self.wh_weight = wh_weight
        self.max_objs = max_objs
        self.fp16_enabled = False

        self.down_ratio = down_ratio
        # num_classes includes background; heatmap has one channel per foreground class
        self.num_fg = num_classes - 1
        # class-agnostic wh regresses 4 values; otherwise 4 per foreground class
        self.wh_planes = 4 if wh_agnostic else 4 * self.num_fg
        self.base_loc = None

        # repeat upsampling n times. 32x to 4x by default.
        self.deconv_layers = nn.ModuleList([
            self.build_upsample(inplanes[-1], planes[0], norm_cfg=norm_cfg),
            self.build_upsample(planes[0], planes[1], norm_cfg=norm_cfg)
        ])
        for i in range(2, len(planes)):
            self.deconv_layers.append(
                self.build_upsample(planes[i - 1], planes[i],
                                    norm_cfg=norm_cfg, no_upsample=(down_ratio == 8)))

        padding = (shortcut_kernel - 1) // 2
        # shortcuts consume backbone levels in reverse (deepest first)
        self.shortcut_layers = self.build_shortcut(
            inplanes[:-1][::-1][:shortcut_num], planes[:shortcut_num], shortcut_cfg,
            kernel_size=shortcut_kernel, padding=padding)

        # heads
        self.wh = self.build_head(self.wh_planes, wh_head_conv_num,
                                  head_conv_plane=wh_conv, use_sym_conv=wh_sym)
        self.hm = self.build_head(self.num_fg, hm_head_conv_num)
        if two_stage:
            assert wh_agnostic
            # second-stage refinement: RoIAlign on the shared feature map,
            # followed by a small conv stack predicting 4 box deltas
            self.align = RoIAlign(7, spatial_scale=1 / 4., sample_num=2)
            self.wh2 = nn.Sequential(ConvModule(self.planes[-1], 32, 5, norm_cfg=norm_cfg),  # 3x3
                                     ConvModule(32, 32, 3, norm_cfg=norm_cfg),
                                     ConvModule(32, 32, 1),
                                     ConvModule(32, 4, 1, activation=None))

    def build_shortcut(self,
                       inplanes,
                       planes,
                       shortcut_cfg,
                       kernel_size=3,
                       padding=1):
        # Builds one ShortcutConv2d per backbone level; shortcut_cfg gives the
        # number of conv layers inside each shortcut.
        assert len(inplanes) == len(planes) == len(shortcut_cfg)

        shortcut_layers = nn.ModuleList()
        for i, (inp, outp, layer_num) in enumerate(zip(
                inplanes, planes, shortcut_cfg)):
            assert layer_num > 0
            layer = ShortcutConv2d(
                inp, outp, [kernel_size] * layer_num, [padding] * layer_num,
                down=(self.down_ratio == 8 and i == len(inplanes) - 1))
            shortcut_layers.append(layer)
        return shortcut_layers

    def build_upsample(self, inplanes, planes, norm_cfg=None, no_upsample=False):
        # One upsampling stage: conv (deformable by default, or one of the
        # configured alternatives) -> optional norm -> ReLU -> optional 2x bilinear.
        if self.upsample_vanilla_conv:
            if isinstance(self.upsample_vanilla_conv, int):
                # integer value doubles as the dilation (and matching padding)
                padding = int((self.upsample_vanilla_conv - 1) / 2)
                dila = padding
                mdcn = nn.Conv2d(inplanes, planes, 3, stride=1, padding=padding, dilation=dila)
            else:
                mdcn = nn.Conv2d(inplanes, planes, 3, stride=1, padding=1)
        elif self.upsample_multiscale_conv:
            mdcn = build_conv_layer(dict(type='MultiScaleConv'), inplanes, planes)
        elif self.use_trident:
            mdcn = build_conv_layer(dict(type='TriConv'), inplanes, planes)
        elif self.up_conv_cfg:
            mdcn = build_conv_layer(self.up_conv_cfg, inplanes, planes)
        else:
            mdcn = ModulatedDeformConvPack(inplanes, planes, 3, offset_mean=self.dcn_mean, stride=1,
                                           padding=1, dilation=1, deformable_groups=1)
        layers = []
        layers.append(mdcn)
        if norm_cfg:
            layers.append(build_norm_layer(norm_cfg, planes)[1])
        layers.append(nn.ReLU(inplace=True))
        if not no_upsample:
            up = nn.UpsamplingBilinear2d(scale_factor=2)
            layers.append(up)

        return nn.Sequential(*layers)

    def build_head(self, out_channel, conv_num=1, head_conv_plane=None, use_sym_conv=False):
        # Builds a prediction head: conv_num ConvModules followed by a final
        # conv whose kernel is selected by the hm_last_* flags.
        head_convs = []
        head_conv_plane = self.head_conv if not head_conv_plane else head_conv_plane
        for i in range(conv_num):
            inp = self.planes[-1] if i == 0 else head_conv_plane
            head_convs.append(ConvModule(inp, head_conv_plane,
                                         self.head_conv_size, conv_cfg=self.conv_cfg, padding=1))

        inp = self.planes[-1] if conv_num <= 0 else head_conv_plane
        if use_sym_conv:
            assert out_channel == 4
            head_convs.append(nn.Conv2d(inp, out_channel, 3, padding=1))
            # head_convs.append(ConvModule(inp, out_channel, 3, conv_cfg=dict(type='WHSymConv')))
        else:
            if self.hm_last_3x3:
                head_convs.append(nn.Conv2d(inp, out_channel, 3, padding=1))
            elif self.hm_last_3x3_d2:
                head_convs.append(nn.Conv2d(inp, out_channel, 3, padding=2, dilation=2))
            elif
self.hm_last_5x5:NEWLINE head_convs.append(nn.Conv2d(inp, out_channel, 5, padding=2))NEWLINE elif self.hm_last_7x7:NEWLINE head_convs.append(nn.Conv2d(inp, out_channel, 7, padding=3))NEWLINE elif self.hm_last_se3x3:NEWLINE head_convs.append(nn.Conv2d(inp, out_channel, 3, padding=1))NEWLINE if not self.no_wh_se or out_channel != 4:NEWLINE head_convs.append(SEBlock(out_channel, compress_ratio=4))NEWLINE else:NEWLINE head_convs.append(nn.Conv2d(inp, out_channel, 1))NEWLINE return nn.Sequential(*head_convs)NEWLINENEWLINE def init_weights(self):NEWLINE for _, m in self.shortcut_layers.named_modules():NEWLINE if isinstance(m, nn.Conv2d):NEWLINE kaiming_init(m)NEWLINENEWLINE for _, m in self.deconv_layers.named_modules():NEWLINE if isinstance(m, nn.BatchNorm2d):NEWLINE nn.init.constant_(m.weight, 1)NEWLINE nn.init.constant_(m.bias, 0)NEWLINENEWLINE for _, m in self.hm.named_modules():NEWLINE if isinstance(m, nn.Conv2d):NEWLINE normal_init(m, std=0.01)NEWLINENEWLINE bias_cls = bias_init_with_prob(0.01)NEWLINE if self.hm_last_se3x3:NEWLINE normal_init(self.hm[-2], std=0.01, bias=bias_cls)NEWLINE else:NEWLINE normal_init(self.hm[-1], std=0.01, bias=bias_cls)NEWLINENEWLINE for _, m in self.wh.named_modules():NEWLINE if isinstance(m, nn.Conv2d):NEWLINE normal_init(m, std=0.001)NEWLINENEWLINE if self.two_stage:NEWLINE for _, m in self.wh2.named_modules():NEWLINE if isinstance(m, nn.Conv2d):NEWLINE normal_init(m, std=0.001)NEWLINENEWLINE def forward(self, feats):NEWLINE """NEWLINENEWLINE Args:NEWLINE feats: list(tensor).NEWLINENEWLINE Returns:NEWLINE hm: tensor, (batch, 80, h, w).NEWLINE wh: tensor, (batch, 4, h, w) or (batch, 80 * 4, h, w).NEWLINE """NEWLINE x = feats[-1]NEWLINE if not self.use_dla:NEWLINE for i, upsample_layer in enumerate(self.deconv_layers):NEWLINE x = upsample_layer(x)NEWLINE if i < len(self.shortcut_layers):NEWLINE shortcut = self.shortcut_layers[i](feats[-i - 2])NEWLINE x = x + shortcutNEWLINE hm = self.hm(x)NEWLINE wh = F.relu(self.wh(x)) * 
self.wh_offset_baseNEWLINE return x, hm, whNEWLINENEWLINE @force_fp32(apply_to=('pred_feat', 'pred_heatmap', 'pred_wh'))NEWLINE def get_bboxes(self,NEWLINE pred_feat,NEWLINE pred_heatmap,NEWLINE pred_wh,NEWLINE img_metas,NEWLINE cfg,NEWLINE rescale=False):NEWLINE batch, cat, height, width = pred_heatmap.size()NEWLINE pred_heatmap = pred_heatmap.detach().sigmoid_()NEWLINE wh = pred_wh.detach()NEWLINE # write_txt(pred_heatmap, filename='pred_hm', thre=0.001)NEWLINE # perform nms on heatmapsNEWLINE if self.use_simple_nms and not getattr(cfg, 'debug', False):NEWLINE heat = simple_nms(pred_heatmap) # used maxpool to filter the max scoreNEWLINE else:NEWLINE heat = pred_heatmapNEWLINE kernel = 3NEWLINE pad = (kernel - 1) // 2NEWLINE hmax = nn.functional.max_pool2d(heat, (kernel, kernel), stride=1, padding=pad)NEWLINE keep = (hmax == heat).float()NEWLINE keep_pad = keep.new_zeros(batch, cat, height + 2, width + 2)NEWLINE keep_pad[..., 1:-1, 1:-1] = keepNEWLINE keep = keep_padNEWLINE # keep = ((keep[..., :-2, :-2] + keep[..., :-2, 1:-1] + keep[..., :-2, 2:] +NEWLINE # keep[..., 1:-1, :-2] + keep[..., 1:-1, 1:-1] + keep[..., 1:-1, 2:] +NEWLINE # keep[..., 2:, :-2] + keep[..., 2:, 1:-1] + keep[..., 2:, 2:]) > 0).float()NEWLINE keep = ((keep[..., :-2, 1:-1] +NEWLINE keep[..., 1:-1, :-2] + keep[..., 1:-1, 1:-1] + keep[..., 1:-1, 2:] +NEWLINE keep[..., 2:, 1:-1]) > 0).float()NEWLINE heat = heat * keepNEWLINENEWLINE topk = getattr(cfg, 'max_per_img', 100)NEWLINE # (batch, topk)NEWLINE scores, inds, clses, ys, xs = self._topk(heat, topk=topk)NEWLINE xs = xs.view(batch, topk, -1, 1) * self.down_ratioNEWLINE ys = ys.view(batch, topk, -1, 1) * self.down_ratioNEWLINENEWLINE wh = wh.permute(0, 2, 3, 1).contiguous()NEWLINE wh = wh.view(wh.size(0), -1, wh.size(3))NEWLINE inds = inds.view(batch, -1, 1)NEWLINE wh_inds = inds.expand(*inds.shape[:-1], wh.size(2))NEWLINE wh = wh.gather(1, wh_inds)NEWLINENEWLINE if not self.wh_agnostic:NEWLINE wh = wh.view(-1, topk, self.num_fg, 4)NEWLINE wh = 
torch.gather(wh, 2, clses[..., None, None].expand(NEWLINE clses.size(0), clses.size(1), 1, 4).long())NEWLINENEWLINE wh = wh.view(batch, topk, -1, 4)NEWLINE clses = clses.view(batch, topk, 1).long()NEWLINE scores = scores.view(batch, topk, 1)NEWLINENEWLINE bboxes = torch.cat([xs - wh[..., [0]], ys - wh[..., [1]],NEWLINE xs + wh[..., [2]], ys + wh[..., [3]]], dim=-1)NEWLINE if self.aug_reg:NEWLINE heat = pred_heatmap.permute(0, 2, 3, 1).contiguous()NEWLINE heat = heat.view(heat.size(0), -1, heat.size(3))NEWLINE score_inds = inds.expand(*inds.shape[:-1], heat.size(2))NEWLINE area_scores = heat.gather(1, score_inds).view(batch, topk, -1, self.num_fg)NEWLINE area_scores = area_scores.gather(-1, clses.expand(NEWLINE *clses.shape[:-1], area_scores.size(-2)).unsqueeze(-1)).squeeze(-1)NEWLINENEWLINE bbox_weight = torch.cat([bboxes.new_ones((*bboxes.shape[:-2], 1)),NEWLINE torch.exp(-1 / (2 * (wh[..., 0, :] / 24) ** 2))],NEWLINE dim=-1) * area_scoresNEWLINE # print(bbox_weight)NEWLINE bboxes = (bboxes * bbox_weight.unsqueeze(-1)).sum(-2) / bbox_weight.sum(-1,NEWLINE keepdims=True)NEWLINE else:NEWLINE bboxes = bboxes.squeeze(-2)NEWLINENEWLINE clses = clses.float()NEWLINE roi_boxes = bboxes.new_tensor([])NEWLINE if self.two_stage:NEWLINE for batch_i in range(bboxes.shape[0]):NEWLINE vaid_pre_boxes_i = bboxes[batch_i] # (xx, 4)NEWLINE roi_boxes = torch.cat([NEWLINE roi_boxes, torch.cat([NEWLINE vaid_pre_boxes_i.new_ones([vaid_pre_boxes_i.size(0), 1]) * batch_i,NEWLINE vaid_pre_boxes_i], dim=1)], dim=0)NEWLINENEWLINE if roi_boxes.size(0) > 0:NEWLINE rois = self.align(pred_feat, roi_boxes) # (n, cha, 7, 7)NEWLINE pred_wh2 = self.wh2(rois).view(-1, 4)NEWLINE bboxes = bboxes.view(-1, 4)NEWLINE bboxes[:, [0, 1]] = bboxes[:, [0, 1]] - pred_wh2[:, [0, 1]] * 16NEWLINE bboxes[:, [2, 3]] = bboxes[:, [2, 3]] + pred_wh2[:, [2, 3]] * 16NEWLINE bboxes = bboxes.view(batch, topk, 4)NEWLINENEWLINE result_list = []NEWLINE score_thr = getattr(cfg, 'score_thr', 0.01)NEWLINE for batch_i in 
range(bboxes.shape[0]):NEWLINE scores_per_img = scores[batch_i]NEWLINE scores_keep = (scores_per_img > score_thr).squeeze(-1)NEWLINENEWLINE scores_per_img = scores_per_img[scores_keep]NEWLINE bboxes_per_img = bboxes[batch_i][scores_keep]NEWLINE labels_per_img = clses[batch_i][scores_keep].squeeze(-1)NEWLINE img_shape = img_metas[batch_i]['pad_shape']NEWLINE bboxes_per_img[:, 0::2] = bboxes_per_img[:, 0::2].clamp(min=0, max=img_shape[1] - 1)NEWLINE bboxes_per_img[:, 1::2] = bboxes_per_img[:, 1::2].clamp(min=0, max=img_shape[0] - 1)NEWLINENEWLINE if rescale:NEWLINE scale_factor = img_metas[batch_i]['scale_factor']NEWLINE bboxes_per_img /= bboxes_per_img.new_tensor(scale_factor)NEWLINENEWLINE if self.use_simple_nms:NEWLINE bboxes_per_img = torch.cat([bboxes_per_img, scores_per_img], dim=1)NEWLINE else:NEWLINE labels_int_flatten = labels_per_img.int()NEWLINE unique_cls_ids = list(set(list(labels_int_flatten.cpu().numpy())))NEWLINE bboxes_per_img_per_cls = bboxes_per_img.new_zeros((0, 5))NEWLINE labels_per_img_per_cls = labels_int_flatten.new_zeros((0,))NEWLINE for cls_id in unique_cls_ids:NEWLINE cls_id_idx = (labels_int_flatten == cls_id)NEWLINE soft_bboxes, ori_idx = soft_nms(torch.cat((NEWLINE bboxes_per_img[cls_id_idx], scores_per_img[cls_id_idx]), dim=1),NEWLINE iou_thr=0.6)NEWLINE unique_labels = labels_int_flatten[cls_id_idx][ori_idx]NEWLINE bboxes_per_img_per_cls = torch.cat((bboxes_per_img_per_cls, soft_bboxes), dim=0)NEWLINE labels_per_img_per_cls = torch.cat((labels_per_img_per_cls, unique_labels))NEWLINE bboxes_per_img = bboxes_per_img_per_clsNEWLINE labels_per_img = labels_per_img_per_cls.float()NEWLINENEWLINE result_list.append((bboxes_per_img, labels_per_img))NEWLINENEWLINE return result_listNEWLINENEWLINE @force_fp32(apply_to=('pred_feat', 'pred_heatmap', 'pred_wh'))NEWLINE def loss(self,NEWLINE pred_feat,NEWLINE pred_heatmap,NEWLINE pred_wh,NEWLINE gt_bboxes,NEWLINE gt_labels,NEWLINE img_metas,NEWLINE cfg,NEWLINE gt_bboxes_ignore=None):NEWLINE 
all_targets = self.target_generator(gt_bboxes, gt_labels, img_metas)NEWLINE hm_loss, wh_loss, wh2_loss = self.loss_calc(pred_feat, pred_heatmap, pred_wh, *all_targets)NEWLINE return {'losses/ttfnet_loss_heatmap': hm_loss, 'losses/ttfnet_loss_wh': wh_loss,NEWLINE 'losses/ttfnet_loss_wh2': wh2_loss}NEWLINENEWLINE def _topk(self, scores, topk):NEWLINE batch, cat, height, width = scores.size()NEWLINENEWLINE # both are (batch, 80, topk)NEWLINE topk_scores, topk_inds = torch.topk(scores.view(batch, cat, -1), topk)NEWLINENEWLINE topk_inds = topk_inds % (height * width)NEWLINE topk_ys = (topk_inds / width).int().float()NEWLINE topk_xs = (topk_inds % width).int().float()NEWLINENEWLINE # both are (batch, topk). select topk from 80*topkNEWLINE topk_score, topk_ind = torch.topk(topk_scores.view(batch, -1), topk)NEWLINE topk_clses = (topk_ind / topk).int()NEWLINE topk_ind = topk_ind.unsqueeze(2)NEWLINE topk_inds = topk_inds.view(batch, -1, 1).gather(1, topk_ind).view(batch, topk)NEWLINE topk_ys = topk_ys.view(batch, -1, 1).gather(1, topk_ind).view(batch, topk)NEWLINE topk_xs = topk_xs.view(batch, -1, 1).gather(1, topk_ind).view(batch, topk)NEWLINENEWLINE if self.aug_reg:NEWLINE expand_topk_inds = topk_inds.unsqueeze(-1).expand(*topk_inds.shape, 5)NEWLINE expand_topk_ys = topk_ys.unsqueeze(-1).expand(*topk_ys.shape, 5)NEWLINE expand_topk_xs = topk_xs.unsqueeze(-1).expand(*topk_xs.shape, 5)NEWLINE topk_inds = torch.stack((topk_inds, topk_inds - 1, topk_inds - width,NEWLINE topk_inds + 1, topk_inds + width), dim=2)NEWLINE topk_ys = torch.stack((topk_ys, topk_ys, topk_ys - 1, topk_ys, topk_ys + 1), dim=2)NEWLINE topk_xs = torch.stack((topk_xs, topk_xs - 1, topk_xs, topk_xs + 1, topk_xs), dim=2)NEWLINE aug_err_ys = (topk_ys >= height) | (topk_ys < 0)NEWLINE aug_err_xs = (topk_xs >= width) | (topk_xs < 0)NEWLINE aug_err_inds = (topk_inds >= (height * width)) | (topk_inds < 0)NEWLINE aug_err = aug_err_ys | aug_err_xs | aug_err_indsNEWLINE topk_ys[aug_err] = 
expand_topk_ys[aug_err]NEWLINE topk_xs[aug_err] = expand_topk_xs[aug_err]NEWLINE topk_inds[aug_err] = expand_topk_inds[aug_err]NEWLINENEWLINE return topk_score, topk_inds, topk_clses, topk_ys, topk_xsNEWLINENEWLINE def gaussian_2d(self, shape, sigma_x=1, sigma_y=1):NEWLINE m, n = [(ss - 1.) / 2. for ss in shape]NEWLINE y, x = np.ogrid[-m:m + 1, -n:n + 1]NEWLINENEWLINE h = np.exp(-(x * x / (2 * sigma_x * sigma_x) + y * y / (2 * sigma_y * sigma_y)))NEWLINE h[h < np.finfo(h.dtype).eps * h.max()] = 0NEWLINE return hNEWLINENEWLINE def draw_truncate_gaussian(self, heatmap, center, h_radius, w_radius, k=1):NEWLINE h, w = 2 * h_radius + 1, 2 * w_radius + 1NEWLINE sigma_x = w / 6NEWLINE sigma_y = h / 6NEWLINE gaussian = self.gaussian_2d((h, w), sigma_x=sigma_x, sigma_y=sigma_y)NEWLINE gaussian = heatmap.new_tensor(gaussian)NEWLINENEWLINE x, y = int(center[0]), int(center[1])NEWLINENEWLINE height, width = heatmap.shape[0:2]NEWLINENEWLINE left, right = min(x, w_radius), min(width - x, w_radius + 1)NEWLINE top, bottom = min(y, h_radius), min(height - y, h_radius + 1)NEWLINENEWLINE masked_heatmap = heatmap[y - top:y + bottom, x - left:x + right]NEWLINE masked_gaussian = gaussian[h_radius - top:h_radius + bottom,NEWLINE w_radius - left:w_radius + right]NEWLINE if min(masked_gaussian.shape) > 0 and min(masked_heatmap.shape) > 0:NEWLINE torch.max(masked_heatmap, masked_gaussian * k, out=masked_heatmap)NEWLINE return heatmapNEWLINENEWLINE def target_single_image(self, gt_boxes, gt_labels, feat_shape):NEWLINE """NEWLINENEWLINE Args:NEWLINE gt_boxes: tensor, tensor <=> img, (num_gt, 4).NEWLINE gt_labels: tensor, tensor <=> img, (num_gt,).NEWLINE feat_shape: tuple.NEWLINENEWLINE Returns:NEWLINE heatmap: tensor, tensor <=> img, (80, h, w).NEWLINE box_target: tensor, tensor <=> img, (4, h, w) or (80 * 4, h, w).NEWLINE reg_weight: tensor, same as box_targetNEWLINE """NEWLINE output_h, output_w = feat_shapeNEWLINE heatmap_channel = self.num_fgNEWLINENEWLINE heatmap = 
gt_boxes.new_zeros((heatmap_channel, output_h, output_w))NEWLINE fake_heatmap = gt_boxes.new_zeros((output_h, output_w))NEWLINE box_target = gt_boxes.new_ones((self.wh_planes, output_h, output_w)) * -1NEWLINE reg_weight = gt_boxes.new_zeros((self.wh_planes // 4, output_h, output_w))NEWLINENEWLINE boxes_areas_log = bbox_areas(gt_boxes)NEWLINE if self.box_size_range:NEWLINE keep_idx = (self.box_size_range[1] ** 2 >= boxes_areas_log) &\NEWLINE (boxes_areas_log >= self.box_size_range[0] ** 2)NEWLINE boxes_areas_log = boxes_areas_log[keep_idx]NEWLINE gt_boxes = gt_boxes[keep_idx]NEWLINE gt_labels = gt_labels[keep_idx]NEWLINE if self.wh_area_process == 'log':NEWLINE boxes_areas_log = boxes_areas_log.log()NEWLINE elif self.wh_area_process == 'sqrt':NEWLINE boxes_areas_log = boxes_areas_log.sqrt()NEWLINE boxes_area_topk_log, boxes_ind = torch.topk(boxes_areas_log, boxes_areas_log.size(0))NEWLINENEWLINE if self.wh_area_process == 'norm':NEWLINE boxes_area_topk_log[:] = 1.NEWLINENEWLINE gt_boxes = gt_boxes[boxes_ind]NEWLINE gt_labels = gt_labels[boxes_ind]NEWLINENEWLINE feat_gt_boxes = gt_boxes / self.down_ratioNEWLINE feat_gt_boxes[:, [0, 2]] = torch.clamp(feat_gt_boxes[:, [0, 2]], min=0,NEWLINE max=output_w - 1)NEWLINE feat_gt_boxes[:, [1, 3]] = torch.clamp(feat_gt_boxes[:, [1, 3]], min=0,NEWLINE max=output_h - 1)NEWLINE feat_hs, feat_ws = (feat_gt_boxes[:, 3] - feat_gt_boxes[:, 1],NEWLINE feat_gt_boxes[:, 2] - feat_gt_boxes[:, 0])NEWLINENEWLINE # we calc the center and ignore area based on the gt-boxes of the origin scaleNEWLINE # no peak will fall between pixelsNEWLINE ct_ints = (torch.stack([(gt_boxes[:, 0] + gt_boxes[:, 2]) / 2,NEWLINE (gt_boxes[:, 1] + gt_boxes[:, 3]) / 2],NEWLINE dim=1) / self.down_ratio).to(torch.int)NEWLINENEWLINE h_radiuses_alpha = (feat_hs / 2. * self.alpha).int()NEWLINE w_radiuses_alpha = (feat_ws / 2. * self.alpha).int()NEWLINE if self.wh_gaussian and self.alpha != self.beta:NEWLINE h_radiuses_beta = (feat_hs / 2. 
* self.beta).int()NEWLINE w_radiuses_beta = (feat_ws / 2. * self.beta).int()NEWLINENEWLINE if not self.wh_gaussian:NEWLINE # calculate positive (center) regionsNEWLINE r1 = (1 - self.beta) / 2NEWLINE ctr_x1s, ctr_y1s, ctr_x2s, ctr_y2s = calc_region(gt_boxes.transpose(0, 1), r1)NEWLINE ctr_x1s, ctr_y1s, ctr_x2s, ctr_y2s = [torch.round(x.float() / self.down_ratio).int()NEWLINE for x in [ctr_x1s, ctr_y1s, ctr_x2s, ctr_y2s]]NEWLINE ctr_x1s, ctr_x2s = [torch.clamp(x, max=output_w - 1) for x in [ctr_x1s, ctr_x2s]]NEWLINE ctr_y1s, ctr_y2s = [torch.clamp(y, max=output_h - 1) for y in [ctr_y1s, ctr_y2s]]NEWLINENEWLINE # larger boxes have lower priority than small boxes.NEWLINE for k in range(boxes_ind.shape[0]):NEWLINE cls_id = gt_labels[k] - 1NEWLINENEWLINE fake_heatmap = fake_heatmap.zero_()NEWLINE self.draw_truncate_gaussian(fake_heatmap, ct_ints[k],NEWLINE h_radiuses_alpha[k].item(), w_radiuses_alpha[k].item())NEWLINE heatmap[cls_id] = torch.max(heatmap[cls_id], fake_heatmap)NEWLINENEWLINE if self.wh_gaussian:NEWLINE if self.alpha != self.beta:NEWLINE fake_heatmap = fake_heatmap.zero_()NEWLINE self.draw_truncate_gaussian(fake_heatmap, ct_ints[k],NEWLINE h_radiuses_beta[k].item(),NEWLINE w_radiuses_beta[k].item())NEWLINE box_target_inds = fake_heatmap > 0NEWLINE else:NEWLINE ctr_x1, ctr_y1, ctr_x2, ctr_y2 = ctr_x1s[k], ctr_y1s[k], ctr_x2s[k], ctr_y2s[k]NEWLINE box_target_inds = torch.zeros_like(fake_heatmap, dtype=torch.uint8)NEWLINE box_target_inds[ctr_y1:ctr_y2 + 1, ctr_x1:ctr_x2 + 1] = 1NEWLINENEWLINE if self.wh_agnostic:NEWLINE box_target[:, box_target_inds] = gt_boxes[k][:, None]NEWLINE cls_id = 0NEWLINE else:NEWLINE box_target[(cls_id * 4):((cls_id + 1) * 4), box_target_inds] = gt_boxes[k][:, None]NEWLINENEWLINE if self.wh_gaussian:NEWLINE local_heatmap = fake_heatmap[box_target_inds]NEWLINE ct_div = local_heatmap.sum()NEWLINE local_heatmap *= boxes_area_topk_log[k]NEWLINE reg_weight[cls_id, box_target_inds] = local_heatmap / ct_divNEWLINE else:NEWLINE 
reg_weight[cls_id, box_target_inds] = \NEWLINE boxes_area_topk_log[k] / box_target_inds.sum().float()NEWLINENEWLINE reg_weight[cls_id, box_target_inds] = local_heatmap / ct_divNEWLINENEWLINE return heatmap, box_target, reg_weightNEWLINENEWLINE def target_generator(self, gt_boxes, gt_labels, img_metas):NEWLINE """NEWLINENEWLINE Args:NEWLINE gt_boxes: list(tensor). tensor <=> image, (gt_num, 4).NEWLINE gt_labels: list(tensor). tensor <=> image, (gt_num,).NEWLINE img_metas: list(dict).NEWLINENEWLINE Returns:NEWLINE heatmap: tensor, (batch, 80, h, w).NEWLINE box_target: tensor, (batch, 4, h, w) or (batch, 80 * 4, h, w).NEWLINE reg_weight: tensor, same as box_target.NEWLINE """NEWLINE with torch.no_grad():NEWLINE feat_shape = (img_metas[0]['pad_shape'][0] // self.down_ratio,NEWLINE img_metas[0]['pad_shape'][1] // self.down_ratio)NEWLINE heatmap, box_target, reg_weight = multi_apply(NEWLINE self.target_single_image,NEWLINE gt_boxes,NEWLINE gt_labels,NEWLINE feat_shape=feat_shapeNEWLINE )NEWLINENEWLINE heatmap, box_target = [torch.stack(t, dim=0).detach() for t in [heatmap, box_target]]NEWLINE reg_weight = torch.stack(reg_weight, dim=0).detach()NEWLINENEWLINE return heatmap, box_target, reg_weightNEWLINENEWLINE def loss_calc(self,NEWLINE pred_feat,NEWLINE pred_hm,NEWLINE pred_wh,NEWLINE heatmap,NEWLINE box_target,NEWLINE wh_weight):NEWLINE """NEWLINENEWLINE Args:NEWLINE pred_hm: tensor, (batch, 80, h, w).NEWLINE pred_wh: tensor, (batch, 4, h, w) or (batch, 80 * 4, h, w).NEWLINE heatmap: tensor, same as pred_hm.NEWLINE box_target: tensor, same as pred_wh.NEWLINE wh_weight: tensor, same as pred_wh.NEWLINENEWLINE Returns:NEWLINE hm_lossNEWLINE wh_lossNEWLINE """NEWLINE H, W = pred_hm.shape[2:]NEWLINE pred_hm = torch.clamp(pred_hm.sigmoid_(), min=1e-4, max=1 - 1e-4)NEWLINE hm_loss = ct_focal_loss(pred_hm, heatmap) * self.hm_weightNEWLINENEWLINE mask = wh_weight.view(-1, H, W)NEWLINE avg_factor = mask.sum() + 1e-4NEWLINENEWLINE if self.base_loc is None or H != 
self.base_loc.shape[1] or W != self.base_loc.shape[2]:NEWLINE base_step = self.down_ratioNEWLINE shifts_x = torch.arange(0, (W - 1) * base_step + 1, base_step,NEWLINE dtype=torch.float32, device=heatmap.device)NEWLINE shifts_y = torch.arange(0, (H - 1) * base_step + 1, base_step,NEWLINE dtype=torch.float32, device=heatmap.device)NEWLINE shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x)NEWLINE self.base_loc = torch.stack((shift_x, shift_y), dim=0) # (2, h, w)NEWLINENEWLINE # (batch, h, w, 4)NEWLINE pred_boxes = torch.cat((self.base_loc - pred_wh[:, [0, 1]],NEWLINE self.base_loc + pred_wh[:, [2, 3]]), dim=1).permute(0, 2, 3, 1)NEWLINE # (batch, h, w, 4)NEWLINE boxes = box_target.permute(0, 2, 3, 1)NEWLINE wh_loss = self.iou_loss(pred_boxes, boxes, mask,NEWLINE avg_factor=avg_factor) * self.wh_weightNEWLINENEWLINE wh2_loss = wh_loss.new_zeros([1])NEWLINE if self.two_stage:NEWLINE heat = simple_nms(pred_hm)NEWLINE scores, inds, clses, ys, xs = self._topk(heat, topk=100)NEWLINENEWLINE pred_boxes_2 = pred_boxes.view(pred_boxes.size(0), -1, pred_boxes.size(3))NEWLINE boxes_2 = boxes.view(*pred_boxes_2.shape)NEWLINE inds = inds.unsqueeze(2).expand(inds.size(0), inds.size(1), pred_boxes_2.size(2))NEWLINE pred_boxes_2 = pred_boxes_2.gather(1, inds) # (batch, 100, 4)NEWLINE boxes_2 = boxes_2.gather(1, inds)NEWLINENEWLINE score_thr = 0.01NEWLINE scores_keep = scores > score_thr # (batch, topk)NEWLINENEWLINE batch_idx = pred_boxes_2.new_tensor(torch.arange(0., pred_boxes_2.shape[0], 1.)).view(NEWLINE -1, 1, 1).expand(pred_boxes_2.shape[0], pred_boxes_2.shape[1], 1)[scores_keep]NEWLINE pred_boxes_2 = pred_boxes_2[scores_keep]NEWLINE boxes_2 = boxes_2[scores_keep].detach()NEWLINENEWLINE valid_boxes = (boxes_2 >= 0).min(1)[0]NEWLINE batch_idx = batch_idx[valid_boxes] # (n, 1)NEWLINE pred_boxes_2 = pred_boxes_2[valid_boxes] # (n, 4)NEWLINE boxes_2 = boxes_2[valid_boxes] # (n, 4)NEWLINE roi_boxes = torch.cat((batch_idx, pred_boxes_2), dim=1).detach()NEWLINENEWLINE if 
roi_boxes.size(0) > 0:NEWLINE rois = self.align(pred_feat, roi_boxes) # (n, cha, 7, 7)NEWLINE pred_wh2 = self.wh2(rois).view(-1, 4)NEWLINE pred_boxes_2[:, [0, 1]] = pred_boxes_2[:, [0, 1]].detach() - \NEWLINE pred_wh2[:, [0, 1]] * 16NEWLINE pred_boxes_2[:, [2, 3]] = pred_boxes_2[:, [2, 3]].detach() + \NEWLINE pred_wh2[:, [2, 3]] * 16NEWLINE wh2_loss = giou_loss(pred_boxes_2, boxes_2,NEWLINE boxes_2.new_ones(boxes_2.size(0)))NEWLINENEWLINE return hm_loss, wh_loss, wh2_lossNEWLINENEWLINENEWLINEclass ShortcutConv2d(nn.Module):NEWLINENEWLINE def __init__(self,NEWLINE in_channels,NEWLINE out_channels,NEWLINE kernel_sizes,NEWLINE paddings,NEWLINE activation_last=False,NEWLINE down=False):NEWLINE super(ShortcutConv2d, self).__init__()NEWLINE assert len(kernel_sizes) == len(paddings)NEWLINENEWLINE layers = []NEWLINE for i, (kernel_size, padding) in enumerate(zip(kernel_sizes, paddings)):NEWLINE inc = in_channels if i == 0 else out_channelsNEWLINE if i == 0 and down:NEWLINE layers.append(nn.Conv2d(inc, out_channels, kernel_size,NEWLINE padding=padding, stride=2))NEWLINE else:NEWLINE layers.append(nn.Conv2d(inc, out_channels, kernel_size, padding=padding))NEWLINE if i < len(kernel_sizes) - 1 or activation_last:NEWLINE layers.append(nn.ReLU(inplace=True))NEWLINENEWLINE self.layers = nn.Sequential(*layers)NEWLINENEWLINE def forward(self, x):NEWLINE if isinstance(x, tuple):NEWLINE x = torch.cat([x[0], F.upsample(x[1], scale_factor=2)], dim=1)NEWLINE y = self.layers(x)NEWLINE return yNEWLINE
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.NEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINE# ==============================================================================NEWLINE"""Common utility functions for evaluation."""NEWLINEfrom __future__ import absolute_importNEWLINEfrom __future__ import divisionNEWLINEfrom __future__ import print_functionNEWLINENEWLINEimport collectionsNEWLINEimport osNEWLINEimport reNEWLINEimport timeNEWLINENEWLINEimport numpy as npNEWLINEfrom six.moves import rangeNEWLINEimport tensorflow.compat.v1 as tfNEWLINENEWLINEimport tf_slim as slimNEWLINENEWLINEfrom object_detection.core import box_listNEWLINEfrom object_detection.core import box_list_opsNEWLINEfrom object_detection.core import keypoint_opsNEWLINEfrom object_detection.core import standard_fields as fieldsNEWLINEfrom object_detection.metrics import coco_evaluationNEWLINEfrom object_detection.metrics import lvis_evaluationNEWLINEfrom object_detection.protos import eval_pb2NEWLINEfrom object_detection.utils import label_map_utilNEWLINEfrom object_detection.utils import object_detection_evaluationNEWLINEfrom object_detection.utils import opsNEWLINEfrom object_detection.utils import shape_utilsNEWLINEfrom object_detection.utils import visualization_utils as vis_utilsNEWLINENEWLINEEVAL_KEYPOINT_METRIC = 'coco_keypoint_metrics'NEWLINENEWLINE# A dictionary of metric names to classes that implement the 
metric. The classesNEWLINE# in the dictionary must implementNEWLINE# utils.object_detection_evaluation.DetectionEvaluator interface.NEWLINEEVAL_METRICS_CLASS_DICT = {NEWLINE 'coco_detection_metrics':NEWLINE coco_evaluation.CocoDetectionEvaluator,NEWLINE 'coco_keypoint_metrics':NEWLINE coco_evaluation.CocoKeypointEvaluator,NEWLINE 'coco_mask_metrics':NEWLINE coco_evaluation.CocoMaskEvaluator,NEWLINE 'coco_panoptic_metrics':NEWLINE coco_evaluation.CocoPanopticSegmentationEvaluator,NEWLINE 'lvis_mask_metrics':NEWLINE lvis_evaluation.LVISMaskEvaluator,NEWLINE 'oid_challenge_detection_metrics':NEWLINE object_detection_evaluation.OpenImagesDetectionChallengeEvaluator,NEWLINE 'oid_challenge_segmentation_metrics':NEWLINE object_detection_evaluationNEWLINE .OpenImagesInstanceSegmentationChallengeEvaluator,NEWLINE 'pascal_voc_detection_metrics':NEWLINE object_detection_evaluation.PascalDetectionEvaluator,NEWLINE 'weighted_pascal_voc_detection_metrics':NEWLINE object_detection_evaluation.WeightedPascalDetectionEvaluator,NEWLINE 'precision_at_recall_detection_metrics':NEWLINE object_detection_evaluation.PrecisionAtRecallDetectionEvaluator,NEWLINE 'pascal_voc_instance_segmentation_metrics':NEWLINE object_detection_evaluation.PascalInstanceSegmentationEvaluator,NEWLINE 'weighted_pascal_voc_instance_segmentation_metrics':NEWLINE object_detection_evaluation.WeightedPascalInstanceSegmentationEvaluator,NEWLINE 'oid_V2_detection_metrics':NEWLINE object_detection_evaluation.OpenImagesDetectionEvaluator,NEWLINE}NEWLINENEWLINEEVAL_DEFAULT_METRIC = 'coco_detection_metrics'NEWLINENEWLINENEWLINEdef write_metrics(metrics, global_step, summary_dir):NEWLINE """Write metrics to a summary directory.NEWLINENEWLINE Args:NEWLINE metrics: A dictionary containing metric names and values.NEWLINE global_step: Global step at which the metrics are computed.NEWLINE summary_dir: Directory to write tensorflow summaries to.NEWLINE """NEWLINE tf.logging.info('Writing metrics to tf summary.')NEWLINE 
summary_writer = tf.summary.FileWriterCache.get(summary_dir)NEWLINE for key in sorted(metrics):NEWLINE summary = tf.Summary(value=[NEWLINE tf.Summary.Value(tag=key, simple_value=metrics[key]),NEWLINE ])NEWLINE summary_writer.add_summary(summary, global_step)NEWLINE tf.logging.info('%s: %f', key, metrics[key])NEWLINE tf.logging.info('Metrics written to tf summary.')NEWLINENEWLINENEWLINE# TODO(rathodv): Add tests.NEWLINEdef visualize_detection_results(result_dict,NEWLINE tag,NEWLINE global_step,NEWLINE categories,NEWLINE summary_dir='',NEWLINE export_dir='',NEWLINE agnostic_mode=False,NEWLINE show_groundtruth=False,NEWLINE groundtruth_box_visualization_color='black',NEWLINE min_score_thresh=.5,NEWLINE max_num_predictions=20,NEWLINE skip_scores=False,NEWLINE skip_labels=False,NEWLINE skip_weights=True,NEWLINE keep_image_id_for_visualization_export=False):NEWLINE """Visualizes detection results and writes visualizations to image summaries.NEWLINENEWLINE This function visualizes an image with its detected bounding boxes and writesNEWLINE to image summaries which can be viewed on tensorboard. It optionally alsoNEWLINE writes images to a directory. In the case of missing entry in the label map,NEWLINE unknown class name in the visualization is shown as "N/A".NEWLINENEWLINE Args:NEWLINE result_dict: a dictionary holding groundtruth and detectionNEWLINE data corresponding to each image being evaluated. 
The following keysNEWLINE are required:NEWLINE 'original_image': a numpy array representing the image with shapeNEWLINE [1, height, width, 3] or [1, height, width, 1]NEWLINE 'detection_boxes': a numpy array of shape [N, 4]NEWLINE 'detection_scores': a numpy array of shape [N]NEWLINE 'detection_classes': a numpy array of shape [N]NEWLINE The following keys are optional:NEWLINE 'groundtruth_boxes': a numpy array of shape [N, 4]NEWLINE 'groundtruth_keypoints': a numpy array of shape [N, num_keypoints, 2]NEWLINE Detections are assumed to be provided in decreasing order of score and forNEWLINE display, and we assume that scores are probabilities between 0 and 1.NEWLINE tag: tensorboard tag (string) to associate with image.NEWLINE global_step: global step at which the visualization are generated.NEWLINE categories: a list of dictionaries representing all possible categories.NEWLINE Each dict in this list has the following keys:NEWLINE 'id': (required) an integer id uniquely identifying this categoryNEWLINE 'name': (required) string representing category nameNEWLINE e.g., 'cat', 'dog', 'pizza'NEWLINE 'supercategory': (optional) string representing the supercategoryNEWLINE e.g., 'animal', 'vehicle', 'food', etcNEWLINE summary_dir: the output directory to which the image summaries are written.NEWLINE export_dir: the output directory to which images are written. 
If this isNEWLINE empty (default), then images are not exported.NEWLINE agnostic_mode: boolean (default: False) controlling whether to evaluate inNEWLINE class-agnostic mode or not.NEWLINE show_groundtruth: boolean (default: False) controlling whether to showNEWLINE groundtruth boxes in addition to detected boxesNEWLINE groundtruth_box_visualization_color: box color for visualizing groundtruthNEWLINE boxesNEWLINE min_score_thresh: minimum score threshold for a box to be visualizedNEWLINE max_num_predictions: maximum number of detections to visualizeNEWLINE skip_scores: whether to skip score when drawing a single detectionNEWLINE skip_labels: whether to skip label when drawing a single detectionNEWLINE keep_image_id_for_visualization_export: whether to keep image identifier inNEWLINE filename when exported to export_dirNEWLINE Raises:NEWLINE ValueError: if result_dict does not contain the expected keys (i.e.,NEWLINE 'original_image', 'detection_boxes', 'detection_scores',NEWLINE 'detection_classes')NEWLINE """NEWLINE detection_fields = fields.DetectionResultFieldsNEWLINE input_fields = fields.InputDataFieldsNEWLINE if not set([NEWLINE input_fields.original_image,NEWLINE detection_fields.detection_boxes,NEWLINE detection_fields.detection_scores,NEWLINE detection_fields.detection_classes,NEWLINE ]).issubset(set(result_dict.keys())):NEWLINE raise ValueError('result_dict does not contain all expected keys.')NEWLINE if show_groundtruth and input_fields.groundtruth_boxes not in result_dict:NEWLINE raise ValueError('If show_groundtruth is enabled, result_dict must contain 'NEWLINE 'groundtruth_boxes.')NEWLINE tf.logging.info('Creating detection visualizations.')NEWLINE category_index = label_map_util.create_category_index(categories)NEWLINENEWLINE image = np.squeeze(result_dict[input_fields.original_image], axis=0)NEWLINE if image.shape[2] == 1: # If one channel image, repeat in RGB.NEWLINE image = np.tile(image, [1, 1, 3])NEWLINE detection_boxes = 
result_dict[detection_fields.detection_boxes]NEWLINE detection_scores = result_dict[detection_fields.detection_scores]NEWLINE detection_classes = np.int32((result_dict[NEWLINE detection_fields.detection_classes]))NEWLINE detection_keypoints = result_dict.get(detection_fields.detection_keypoints)NEWLINE detection_masks = result_dict.get(detection_fields.detection_masks)NEWLINE detection_boundaries = result_dict.get(detection_fields.detection_boundaries)NEWLINE detection_weights = result_dict[detection_fields.detection_weightPerObject]NEWLINENEWLINE # Plot groundtruth underneath detectionsNEWLINE if show_groundtruth:NEWLINE groundtruth_boxes = result_dict[input_fields.groundtruth_boxes]NEWLINE groundtruth_keypoints = result_dict.get(input_fields.groundtruth_keypoints)NEWLINE vis_utils.visualize_boxes_and_labels_on_image_array(NEWLINE image=image,NEWLINE boxes=groundtruth_boxes,NEWLINE classes=None,NEWLINE scores=None,NEWLINE category_index=category_index,NEWLINE keypoints=groundtruth_keypoints,NEWLINE use_normalized_coordinates=False,NEWLINE max_boxes_to_draw=None,NEWLINE groundtruth_box_visualization_color=groundtruth_box_visualization_color)NEWLINE vis_utils.visualize_boxes_and_labels_on_image_array(NEWLINE image,NEWLINE detection_boxes,NEWLINE detection_classes,NEWLINE detection_scores,NEWLINE detection_weights,NEWLINE category_index,NEWLINE instance_masks=detection_masks,NEWLINE instance_boundaries=detection_boundaries,NEWLINE keypoints=detection_keypoints,NEWLINE use_normalized_coordinates=False,NEWLINE max_boxes_to_draw=max_num_predictions,NEWLINE min_score_thresh=min_score_thresh,NEWLINE agnostic_mode=agnostic_mode,NEWLINE skip_scores=skip_scores,NEWLINE skip_labels=skip_labels,NEWLINE skip_weights=skip_weights)NEWLINENEWLINE if export_dir:NEWLINE if keep_image_id_for_visualization_export and result_dict[fields.NEWLINE InputDataFields()NEWLINE .key]:NEWLINE export_path = os.path.join(export_dir, 'export-{}-{}.png'.format(NEWLINE tag, 
def _run_checkpoint_once(tensor_dict,
                         evaluators=None,
                         batch_processor=None,
                         checkpoint_dirs=None,
                         variables_to_restore=None,
                         restore_fn=None,
                         num_batches=1,
                         master='',
                         save_graph=False,
                         save_graph_dir='',
                         losses_dict=None,
                         eval_export_path=None,
                         process_metrics_fn=None):
  """Evaluates metrics defined in evaluators and returns summaries.

  This function loads the latest checkpoint in checkpoint_dirs and evaluates
  all metrics defined in evaluators. The metrics are processed in batch by the
  batch_processor.

  Args:
    tensor_dict: a dictionary holding tensors representing a batch of detections
      and corresponding groundtruth annotations.
    evaluators: a list of object of type DetectionEvaluator to be used for
      evaluation. Note that the metric names produced by different evaluators
      must be unique.
    batch_processor: a function taking four positional arguments plus a
      `losses_dict` keyword argument:
      1. tensor_dict: the same tensor_dict that is passed in as the first
         argument to this function.
      2. sess: a tensorflow session
      3. batch_index: an integer representing the index of the batch amongst
         all batches
      4. counters: the `{'skipped': ..., 'success': ...}` dict defined below,
         which the processor is expected to update.
      By default, batch_processor is None, which defaults to running:
        return sess.run(tensor_dict)
      To skip an image, it suffices to return an empty dictionary in place of
      result_dict.
    checkpoint_dirs: list of directories to load into an EnsembleModel. If it
      has only one directory, EnsembleModel will not be used --
      a DetectionModel
      will be instantiated directly. Not used if restore_fn is set.
    variables_to_restore: None, or a dictionary mapping variable names found in
      a checkpoint to model variables. The dictionary would normally be
      generated by creating a tf.train.ExponentialMovingAverage object and
      calling its variables_to_restore() method. Not used if restore_fn is set.
    restore_fn: None, or a function that takes a tf.Session object and correctly
      restores all necessary variables from the correct checkpoint file. If
      None, attempts to restore from the first directory in checkpoint_dirs.
    num_batches: the number of batches to use for evaluation.
    master: the location of the Tensorflow session.
    save_graph: whether or not the Tensorflow graph is stored as a pbtxt file.
    save_graph_dir: where to store the Tensorflow graph on disk. If save_graph
      is True this must be non-empty.
    losses_dict: optional dictionary of scalar detection losses.
    eval_export_path: Path for saving a json file that contains the detection
      results in json format.
    process_metrics_fn: a callback called with evaluation results after each
      evaluation is done. It could be used e.g. to back up checkpoints with
      best evaluation scores, or to call an external system to update evaluation
      results in order to drive best hyper-parameter search. Parameters are:
      int checkpoint_number, Dict[str, ObjectDetectionEvalMetrics] metrics,
      str checkpoint_file path.

  Returns:
    global_step: the count of global steps.
    all_evaluator_metrics: A dictionary containing metric names and values.

  Raises:
    ValueError: if restore_fn is None and checkpoint_dirs doesn't have at least
      one element.
    ValueError: if save_graph is True and save_graph_dir is not defined.
  """
  if save_graph and not save_graph_dir:
    raise ValueError('`save_graph_dir` must be defined.')
  sess = tf.Session(master, graph=tf.get_default_graph())
  sess.run(tf.global_variables_initializer())
  sess.run(tf.local_variables_initializer())
  sess.run(tf.tables_initializer())
  # checkpoint_file stays None on the restore_fn path; the process_metrics_fn
  # callback below is skipped in that case since no checkpoint path is known.
  checkpoint_file = None
  if restore_fn:
    restore_fn(sess)
  else:
    if not checkpoint_dirs:
      raise ValueError('`checkpoint_dirs` must have at least one entry.')
    # Only the first checkpoint directory is consulted for restoring here.
    checkpoint_file = tf.train.latest_checkpoint(checkpoint_dirs[0])
    saver = tf.train.Saver(variables_to_restore)
    saver.restore(sess, checkpoint_file)

  if save_graph:
    tf.train.write_graph(sess.graph_def, save_graph_dir, 'eval.pbtxt')

  counters = {'skipped': 0, 'success': 0}
  aggregate_result_losses_dict = collections.defaultdict(list)
  with slim.queues.QueueRunners(sess):
    try:
      for batch in range(int(num_batches)):
        if (batch + 1) % 100 == 0:
          tf.logging.info('Running eval ops batch %d/%d', batch + 1,
                          num_batches)
        if not batch_processor:
          try:
            if not losses_dict:
              losses_dict = {}
            result_dict, result_losses_dict = sess.run([tensor_dict,
                                                        losses_dict])
            counters['success'] += 1
          except tf.errors.InvalidArgumentError:
            # Treat an InvalidArgumentError from the eval ops as a bad image:
            # count it as skipped and fall through to the `continue` below.
            tf.logging.info('Skipping image')
            counters['skipped'] += 1
            result_dict = {}
        else:
          result_dict, result_losses_dict = batch_processor(
              tensor_dict, sess, batch, counters, losses_dict=losses_dict)
        # An empty result_dict signals "skip this image".
        if not result_dict:
          continue
        for key, value in iter(result_losses_dict.items()):
          aggregate_result_losses_dict[key].append(value)
        for evaluator in evaluators:
          # TODO(b/65130867): Use image_id tensor once we fix the input data
          # decoders to return correct image_id.
          # TODO(akuznetsa): result_dict contains batches of images, while
          # add_single_ground_truth_image_info expects a single image. Fix
          if (isinstance(result_dict, dict) and
              fields.InputDataFields.key in result_dict and
              result_dict[fields.InputDataFields.key]):
            image_id = result_dict[fields.InputDataFields.key]
          else:
            # Fall back to the batch index when no usable image key exists.
            image_id = batch
          evaluator.add_single_ground_truth_image_info(
              image_id=image_id, groundtruth_dict=result_dict)
          evaluator.add_single_detected_image_info(
              image_id=image_id, detections_dict=result_dict)
      tf.logging.info('Running eval batches done.')
    except tf.errors.OutOfRangeError:
      # The input pipeline ran dry before num_batches was reached.
      tf.logging.info('Done evaluating -- epoch limit reached')
    finally:
      # When done, ask the threads to stop.
      tf.logging.info('# success: %d', counters['success'])
      tf.logging.info('# skipped: %d', counters['skipped'])
    all_evaluator_metrics = {}
    # NOTE(review): `eval_export_path and eval_export_path is not None` is
    # redundant -- the second clause can never change the outcome.
    if eval_export_path and eval_export_path is not None:
      for evaluator in evaluators:
        if (isinstance(evaluator, coco_evaluation.CocoDetectionEvaluator) or
            isinstance(evaluator, coco_evaluation.CocoMaskEvaluator)):
          tf.logging.info('Started dumping to json file.')
          evaluator.dump_detections_to_json_file(
              json_output_path=eval_export_path)
          tf.logging.info('Finished dumping to json file.')
    for evaluator in evaluators:
      metrics = evaluator.evaluate()
      evaluator.clear()
      # Evaluators must not produce overlapping metric names.
      if any(key in all_evaluator_metrics for key in metrics):
        raise ValueError('Metric names between evaluators must not collide.')
      all_evaluator_metrics.update(metrics)
    global_step = tf.train.global_step(sess, tf.train.get_global_step())

    # Average each per-batch loss list into a single scalar metric.
    for key, value in iter(aggregate_result_losses_dict.items()):
      all_evaluator_metrics['Losses/' + key] = np.mean(value)
    if process_metrics_fn and checkpoint_file:
      # The checkpoint number is parsed from the file name suffix.
      m = re.search(r'model.ckpt-(\d+)$', checkpoint_file)
      if not m:
        tf.logging.error('Failed to parse checkpoint number from: %s',
                         checkpoint_file)
      else:
        checkpoint_number = int(m.group(1))
        process_metrics_fn(checkpoint_number, all_evaluator_metrics,
                           checkpoint_file)
  sess.close()
  return (global_step, all_evaluator_metrics)
def repeated_checkpoint_run(tensor_dict,
                            summary_dir,
                            evaluators,
                            batch_processor=None,
                            checkpoint_dirs=None,
                            variables_to_restore=None,
                            restore_fn=None,
                            num_batches=1,
                            eval_interval_secs=120,
                            max_number_of_evaluations=None,
                            max_evaluation_global_step=None,
                            master='',
                            save_graph=False,
                            save_graph_dir='',
                            losses_dict=None,
                            eval_export_path=None,
                            process_metrics_fn=None):
  """Periodically evaluates desired tensors using checkpoint_dirs or restore_fn.

  This function repeatedly loads a checkpoint and evaluates a desired
  set of tensors (provided by tensor_dict) and hands the resulting numpy
  arrays to a function result_processor which can be used to further
  process/save/visualize the results.

  Args:
    tensor_dict: a dictionary holding tensors representing a batch of detections
      and corresponding groundtruth annotations.
    summary_dir: a directory to write metrics summaries.
    evaluators: a list of object of type DetectionEvaluator to be used for
      evaluation. Note that the metric names produced by different evaluators
      must be unique.
    batch_processor: a function taking three arguments:
      1. tensor_dict: the same tensor_dict that is passed in as the first
         argument to this function.
      2. sess: a tensorflow session
      3. batch_index: an integer representing the index of the batch amongst
         all batches
      By default, batch_processor is None, which defaults to running:
        return sess.run(tensor_dict)
    checkpoint_dirs: list of directories to load into a DetectionModel or an
      EnsembleModel if restore_fn isn't set. Also used to determine when to run
      next evaluation. Must have at least one element.
    variables_to_restore: None, or a dictionary mapping variable names found in
      a checkpoint to model variables. The dictionary would normally be
      generated by creating a tf.train.ExponentialMovingAverage object and
      calling its variables_to_restore() method. Not used if restore_fn is set.
    restore_fn: a function that takes a tf.Session object and correctly restores
      all necessary variables from the correct checkpoint file.
    num_batches: the number of batches to use for evaluation.
    eval_interval_secs: the number of seconds between each evaluation run.
    max_number_of_evaluations: the max number of iterations of the evaluation.
      If the value is left as None the evaluation continues indefinitely.
    max_evaluation_global_step: global step when evaluation stops.
    master: the location of the Tensorflow session.
    save_graph: whether or not the Tensorflow graph is saved as a pbtxt file.
    save_graph_dir: where to save on disk the Tensorflow graph. If store_graph
      is True this must be non-empty.
    losses_dict: optional dictionary of scalar detection losses.
    eval_export_path: Path for saving a json file that contains the detection
      results in json format.
    process_metrics_fn: a callback called with evaluation results after each
      evaluation is done. It could be used e.g. to back up checkpoints with
      best evaluation scores, or to call an external system to update evaluation
      results in order to drive best hyper-parameter search. Parameters are:
      int checkpoint_number, Dict[str, ObjectDetectionEvalMetrics] metrics,
      str checkpoint_file path.

  Returns:
    metrics: A dictionary containing metric names and values in the latest
      evaluation.

  Raises:
    ValueError: if max_num_of_evaluations is not None or a positive number.
    ValueError: if checkpoint_dirs doesn't have at least one element.
  """
  if max_number_of_evaluations and max_number_of_evaluations <= 0:
    raise ValueError(
        '`max_number_of_evaluations` must be either None or a positive number.')
  if max_evaluation_global_step and max_evaluation_global_step <= 0:
    raise ValueError(
        '`max_evaluation_global_step` must be either None or positive.')

  if not checkpoint_dirs:
    raise ValueError('`checkpoint_dirs` must have at least one entry.')

  last_evaluated_model_path = None
  number_of_evaluations = 0
  while True:
    start = time.time()
    tf.logging.info('Starting evaluation at ' + time.strftime(
        '%Y-%m-%d-%H:%M:%S', time.gmtime()))
    # Only the first checkpoint directory is polled for new checkpoints.
    model_path = tf.train.latest_checkpoint(checkpoint_dirs[0])
    if not model_path:
      tf.logging.info('No model found in %s. Will try again in %d seconds',
                      checkpoint_dirs[0], eval_interval_secs)
    elif model_path == last_evaluated_model_path:
      tf.logging.info('Found already evaluated checkpoint. Will try again in '
                      '%d seconds', eval_interval_secs)
    else:
      last_evaluated_model_path = model_path
      global_step, metrics = _run_checkpoint_once(
          tensor_dict,
          evaluators,
          batch_processor,
          checkpoint_dirs,
          variables_to_restore,
          restore_fn,
          num_batches,
          master,
          save_graph,
          save_graph_dir,
          losses_dict=losses_dict,
          eval_export_path=eval_export_path,
          process_metrics_fn=process_metrics_fn)
      write_metrics(metrics, global_step, summary_dir)
      if (max_evaluation_global_step and
          global_step >= max_evaluation_global_step):
        tf.logging.info('Finished evaluation!')
        break
    # Counted every poll iteration, whether or not an evaluation actually ran.
    number_of_evaluations += 1

    # NOTE(review): if max_number_of_evaluations is hit before any checkpoint
    # was found, `metrics` below is unbound and this raises NameError --
    # confirm callers always have at least one checkpoint available.
    if (max_number_of_evaluations and
        number_of_evaluations >= max_number_of_evaluations):
      tf.logging.info('Finished evaluation!')
      break
    # Sleep only for the remainder of the interval not consumed by evaluation.
    time_to_next_eval = start + eval_interval_secs - time.time()
    if time_to_next_eval > 0:
      time.sleep(time_to_next_eval)

  return metrics
def _scale_box_to_absolute(args):
  """Map-fn element function: converts one image's normalized boxes to
  absolute coordinates.

  Args:
    args: a (boxes, image_shape) pair, where boxes is a [num_boxes, 4]
      float tensor of normalized box corners and image_shape holds the
      image height at index 0 and width at index 1.

  Returns:
    A [num_boxes, 4] float tensor of boxes in absolute coordinates.
  """
  boxes = args[0]
  image_shape = args[1]
  height = image_shape[0]
  width = image_shape[1]
  absolute_boxlist = box_list_ops.to_absolute_coordinates(
      box_list.BoxList(boxes), height, width)
  return absolute_boxlist.get()
def _resize_detection_masks(arg_tuple):
  """Resizes detection masks.

  Args:
    arg_tuple: A (detection_boxes, detection_masks, image_shape, pad_shape)
      tuple where
      detection_boxes is a tf.float32 tensor of size [num_masks, 4] containing
        the box corners. Row i contains [ymin, xmin, ymax, xmax] of the box
        corresponding to mask i. Note that the box corners are in
        normalized coordinates.
      detection_masks is a tensor of size
        [num_masks, mask_height, mask_width].
      image_shape is a tensor of shape [2]
      pad_shape is a tensor of shape [2] --- this is assumed to be greater
        than or equal to image_shape along both dimensions and represents a
        shape to-be-padded-to.

  Returns:
    A [num_masks, pad_shape[0], pad_shape[1]] tf.uint8 tensor: each per-box
    mask reframed onto the full image of size image_shape, zero-padded at the
    bottom/right up to pad_shape, and binarized if it arrived as float.
  """

  detection_boxes, detection_masks, image_shape, pad_shape = arg_tuple

  # Reproject each per-box mask onto the full image canvas
  # (image_shape[0] x image_shape[1]).
  detection_masks_reframed = ops.reframe_box_masks_to_image_masks(
      detection_masks, detection_boxes, image_shape[0], image_shape[1])

  # Build a [3, 2] paddings matrix for tf.pad: column 0 (pad-before) is all
  # zeros; column 1 (pad-after) is [0, pad_h - img_h, pad_w - img_w], i.e. no
  # padding on the instance dimension and trailing padding on height/width.
  pad_instance_dim = tf.zeros([3, 1], dtype=tf.int32)
  pad_hw_dim = tf.concat([tf.zeros([1], dtype=tf.int32),
                          pad_shape - image_shape], axis=0)
  pad_hw_dim = tf.expand_dims(pad_hw_dim, 1)
  paddings = tf.concat([pad_instance_dim, pad_hw_dim], axis=1)
  detection_masks_reframed = tf.pad(detection_masks_reframed, paddings)

  # If the masks are currently float, binarize them. Otherwise keep them as
  # integers, since they have already been thresholded.
  if detection_masks_reframed.dtype == tf.float32:
    detection_masks_reframed = tf.greater(detection_masks_reframed, 0.5)
  return tf.cast(detection_masks_reframed, tf.uint8)
def resize_detection_masks(detection_boxes, detection_masks,
                           original_image_spatial_shapes):
  """Resizes per-box detection masks to be relative to the entire image.

  Note that this function only works when the spatial size of all images in
  the batch is the same. If not, this function should be used with
  batch_size=1.

  Args:
    detection_boxes: A [batch_size, num_instances, 4] float tensor containing
      bounding boxes.
    detection_masks: A [batch_size, num_instances, height, width] float tensor
      containing binary instance masks per box.
    original_image_spatial_shapes: a [batch_size, 2] shaped int tensor
      holding the spatial dimensions (height, width) of each image in the
      batch. (NOTE(review): previously documented as [batch_size, 3], but
      the per-image helper subtracts it from a shape-[2] pad target, so only
      two spatial entries per row are consistent with the code.)

  Returns:
    masks: Masks resized to the spatial extents given by
      (original_image_spatial_shapes[0, 0],
       original_image_spatial_shapes[0, 1])
  """
  # Pad every image's masks to the batch-wide max along each spatial dim so
  # the per-image outputs of the map_fn can be stacked into one tensor.
  # (In the evaluator this relies on the original_image_spatial_shape field
  # provided via the eval dict.)
  max_spatial_shape = tf.reduce_max(
      original_image_spatial_shapes, axis=0, keep_dims=True)
  tiled_max_spatial_shape = tf.tile(
      max_spatial_shape,
      multiples=[tf.shape(original_image_spatial_shapes)[0], 1])
  return shape_utils.static_or_dynamic_map_fn(
      _resize_detection_masks,
      elems=[detection_boxes,
             detection_masks,
             original_image_spatial_shapes,
             tiled_max_spatial_shape],
      dtype=tf.uint8)
def _resize_surface_coordinate_masks(args):
  """Map-fn element function: reframes per-box surface coordinates onto the
  full image.

  Args:
    args: a (detection_boxes, surface_coords, image_shape) tuple for a single
      image. surface_coords carries two channels in its last dimension
      (unstacked below as v and u), each reframed independently like a mask.

  Returns:
    A tensor with the reframed v and u channels restacked on the last axis.
  """
  detection_boxes, surface_coords, image_shape = args
  surface_coords_v, surface_coords_u = tf.unstack(surface_coords, axis=-1)
  surface_coords_v_reframed = ops.reframe_box_masks_to_image_masks(
      surface_coords_v, detection_boxes, image_shape[0], image_shape[1])
  surface_coords_u_reframed = ops.reframe_box_masks_to_image_masks(
      surface_coords_u, detection_boxes, image_shape[0], image_shape[1])
  return tf.stack([surface_coords_v_reframed, surface_coords_u_reframed],
                  axis=-1)


def _scale_keypoint_to_absolute(args):
  """Map-fn element function: scales one image's normalized keypoints to
  absolute coordinates using (height, width) from image_shape."""
  keypoints, image_shape = args
  return keypoint_ops.scale(keypoints, image_shape[0], image_shape[1])
def result_dict_for_single_example(image,
                                   key,
                                   detections,
                                   groundtruth=None,
                                   class_agnostic=False,
                                   scale_to_absolute=False):
  """Merges all detection and groundtruth information for a single example.

  Note that evaluation tools require classes that are 1-indexed, and so this
  function performs the offset. If `class_agnostic` is True, all output classes
  have label 1.

  Implemented as a thin wrapper around result_dict_for_batched_example: it
  adds a batch dimension of 1 to all inputs, delegates, then strips the batch
  dimension from all outputs except the original image.

  Args:
    image: A single 4D uint8 image tensor of shape [1, H, W, C].
    key: A single string tensor identifying the image.
    detections: A dictionary of detections, returned from
      DetectionModel.postprocess().
    groundtruth: (Optional) Dictionary of groundtruth items, with fields:
      'groundtruth_boxes': [num_boxes, 4] float32 tensor of boxes, in
        normalized coordinates.
      'groundtruth_classes': [num_boxes] int64 tensor of 1-indexed classes.
      'groundtruth_area': [num_boxes] float32 tensor of bbox area. (Optional)
      'groundtruth_is_crowd': [num_boxes] int64 tensor. (Optional)
      'groundtruth_difficult': [num_boxes] int64 tensor. (Optional)
      'groundtruth_group_of': [num_boxes] int64 tensor. (Optional)
      'groundtruth_instance_masks': 3D int64 tensor of instance masks
        (Optional).
      'groundtruth_keypoints': [num_boxes, num_keypoints, 2] float32 tensor with
        keypoints (Optional).
      NOTE(review): this dict is mutated in place (a batch dimension is added
      to every entry) -- callers should not reuse it afterwards. The same
      applies to `detections`.
    class_agnostic: Boolean indicating whether the detections are class-agnostic
      (i.e. binary). Default False.
    scale_to_absolute: Boolean indicating whether boxes and keypoints should be
      scaled to absolute coordinates. Note that for IoU based evaluations, it
      does not matter whether boxes are expressed in absolute or relative
      coordinates. Default False.

  Returns:
    A dictionary with:
    'original_image': A [1, H, W, C] uint8 image tensor.
    'key': A string tensor with image identifier.
    'detection_boxes': [max_detections, 4] float32 tensor of boxes, in
      normalized or absolute coordinates, depending on the value of
      `scale_to_absolute`.
    'detection_scores': [max_detections] float32 tensor of scores.
    'detection_classes': [max_detections] int64 tensor of 1-indexed classes.
    'detection_masks': [max_detections, H, W] float32 tensor of binarized
      masks, reframed to full image masks.
    'groundtruth_boxes': [num_boxes, 4] float32 tensor of boxes, in
      normalized or absolute coordinates, depending on the value of
      `scale_to_absolute`. (Optional)
    'groundtruth_classes': [num_boxes] int64 tensor of 1-indexed classes.
      (Optional)
    'groundtruth_area': [num_boxes] float32 tensor of bbox area. (Optional)
    'groundtruth_is_crowd': [num_boxes] int64 tensor. (Optional)
    'groundtruth_difficult': [num_boxes] int64 tensor. (Optional)
    'groundtruth_group_of': [num_boxes] int64 tensor. (Optional)
    'groundtruth_instance_masks': 3D int64 tensor of instance masks
      (Optional).
    'groundtruth_keypoints': [num_boxes, num_keypoints, 2] float32 tensor with
      keypoints (Optional).
  """

  # NOTE(review): max_gt_boxes is only bound inside this branch; if
  # `groundtruth` is falsy the reference below raises NameError. Confirm this
  # function is only ever called with groundtruth provided.
  if groundtruth:
    max_gt_boxes = tf.shape(
        groundtruth[fields.InputDataFields.groundtruth_boxes])[0]
    for gt_key in groundtruth:
      # expand groundtruth dict along the batch dimension.
      groundtruth[gt_key] = tf.expand_dims(groundtruth[gt_key], 0)

  # Keep only the first detection entry and re-add a batch dimension of 1.
  for detection_key in detections:
    detections[detection_key] = tf.expand_dims(
        detections[detection_key][0], axis=0)

  batched_output_dict = result_dict_for_batched_example(
      image,
      tf.expand_dims(key, 0),
      detections,
      groundtruth,
      class_agnostic,
      scale_to_absolute,
      max_gt_boxes=max_gt_boxes)

  # These keys are either kept batched (original_image) or dropped entirely
  # from the single-example output (batch bookkeeping counts).
  exclude_keys = [
      fields.InputDataFields.original_image,
      fields.DetectionResultFields.num_detections,
      fields.InputDataFields.num_groundtruth_boxes
  ]

  output_dict = {
      fields.InputDataFields.original_image:
          batched_output_dict[fields.InputDataFields.original_image]
  }

  for key in batched_output_dict:
    # remove the batch dimension.
    if key not in exclude_keys:
      output_dict[key] = tf.squeeze(batched_output_dict[key], 0)
  return output_dict
If `class_agnostic` is True, all output classesNEWLINE have label 1.NEWLINE The groundtruth coordinates of boxes/keypoints in 'groundtruth' dictionary areNEWLINE normalized relative to the (potentially padded) input image, while theNEWLINE coordinates in 'detection' dictionary are normalized relative to the trueNEWLINE image shape.NEWLINENEWLINE Args:NEWLINE images: A single 4D uint8 image tensor of shape [batch_size, H, W, C].NEWLINE keys: A [batch_size] string/int tensor with image identifier.NEWLINE detections: A dictionary of detections, returned fromNEWLINE DetectionModel.postprocess().NEWLINE groundtruth: (Optional) Dictionary of groundtruth items, with fields:NEWLINE 'groundtruth_boxes': [batch_size, max_number_of_boxes, 4] float32 tensorNEWLINE of boxes, in normalized coordinates.NEWLINE 'groundtruth_classes': [batch_size, max_number_of_boxes] int64 tensor ofNEWLINE 1-indexed classes.NEWLINE 'groundtruth_area': [batch_size, max_number_of_boxes] float32 tensor ofNEWLINE bbox area. (Optional)NEWLINE 'groundtruth_is_crowd':[batch_size, max_number_of_boxes] int64NEWLINE tensor. (Optional)NEWLINE 'groundtruth_difficult': [batch_size, max_number_of_boxes] int64NEWLINE tensor. (Optional)NEWLINE 'groundtruth_group_of': [batch_size, max_number_of_boxes] int64NEWLINE tensor. (Optional)NEWLINE 'groundtruth_instance_masks': 4D int64 tensor of instanceNEWLINE masks (Optional).NEWLINE 'groundtruth_keypoints': [batch_size, max_number_of_boxes, num_keypoints,NEWLINE 2] float32 tensor with keypoints (Optional).NEWLINE 'groundtruth_keypoint_visibilities': [batch_size, max_number_of_boxes,NEWLINE num_keypoints] bool tensor with keypoint visibilities (Optional).NEWLINE 'groundtruth_labeled_classes': [batch_size, num_classes] int64NEWLINE tensor of 1-indexed classes. (Optional)NEWLINE 'groundtruth_dp_num_points': [batch_size, max_number_of_boxes] int32NEWLINE tensor. 
(Optional)NEWLINE 'groundtruth_dp_part_ids': [batch_size, max_number_of_boxes,NEWLINE max_sampled_points] int32 tensor. (Optional)NEWLINE 'groundtruth_dp_surface_coords_list': [batch_size, max_number_of_boxes,NEWLINE max_sampled_points, 4] float32 tensor. (Optional)NEWLINE class_agnostic: Boolean indicating whether the detections are class-agnosticNEWLINE (i.e. binary). Default False.NEWLINE scale_to_absolute: Boolean indicating whether boxes and keypoints should beNEWLINE scaled to absolute coordinates. Note that for IoU based evaluations, itNEWLINE does not matter whether boxes are expressed in absolute or relativeNEWLINE coordinates. Default False.NEWLINE original_image_spatial_shapes: A 2D int32 tensor of shape [batch_size, 2]NEWLINE used to resize the image. When set to None, the image size is retained.NEWLINE true_image_shapes: A 2D int32 tensor of shape [batch_size, 3]NEWLINE containing the size of the unpadded original_image.NEWLINE max_gt_boxes: [batch_size] tensor representing the maximum number ofNEWLINE groundtruth boxes to pad.NEWLINE label_id_offset: offset for class ids.NEWLINENEWLINE Returns:NEWLINE A dictionary with:NEWLINE 'original_image': A [batch_size, H, W, C] uint8 image tensor.NEWLINE 'original_image_spatial_shape': A [batch_size, 2] tensor containing theNEWLINE original image sizes.NEWLINE 'true_image_shape': A [batch_size, 3] tensor containing the size ofNEWLINE the unpadded original_image.NEWLINE 'key': A [batch_size] string tensor with image identifier.NEWLINE 'detection_boxes': [batch_size, max_detections, 4] float32 tensor of boxes,NEWLINE in normalized or absolute coordinates, depending on the value ofNEWLINE `scale_to_absolute`.NEWLINE 'detection_scores': [batch_size, max_detections] float32 tensor of scores.NEWLINE 'detection_classes': [batch_size, max_detections] int64 tensor of 1-indexedNEWLINE classes.NEWLINE 'detection_masks': [batch_size, max_detections, H, W] uint8 tensor ofNEWLINE instance masks, reframed to full image masks. 
Note that these may beNEWLINE binarized (e.g. {0, 1}), or may contain 1-indexed part labels. (Optional)NEWLINE 'detection_keypoints': [batch_size, max_detections, num_keypoints, 2]NEWLINE float32 tensor containing keypoint coordinates. (Optional)NEWLINE 'detection_keypoint_scores': [batch_size, max_detections, num_keypoints]NEWLINE float32 tensor containing keypoint scores. (Optional)NEWLINE 'detection_surface_coords': [batch_size, max_detection, H, W, 2] float32NEWLINE tensor with normalized surface coordinates (e.g. DensePose UVNEWLINE coordinates). (Optional)NEWLINE 'num_detections': [batch_size] int64 tensor containing number of validNEWLINE detections.NEWLINE 'groundtruth_boxes': [batch_size, num_boxes, 4] float32 tensor of boxes, inNEWLINE normalized or absolute coordinates, depending on the value ofNEWLINE `scale_to_absolute`. (Optional)NEWLINE 'groundtruth_classes': [batch_size, num_boxes] int64 tensor of 1-indexedNEWLINE classes. (Optional)NEWLINE 'groundtruth_area': [batch_size, num_boxes] float32 tensor of bboxNEWLINE area. (Optional)NEWLINE 'groundtruth_is_crowd': [batch_size, num_boxes] int64 tensor. (Optional)NEWLINE 'groundtruth_difficult': [batch_size, num_boxes] int64 tensor. (Optional)NEWLINE 'groundtruth_group_of': [batch_size, num_boxes] int64 tensor. (Optional)NEWLINE 'groundtruth_instance_masks': 4D int64 tensor of instance masksNEWLINE (Optional).NEWLINE 'groundtruth_keypoints': [batch_size, num_boxes, num_keypoints, 2] float32NEWLINE tensor with keypoints (Optional).NEWLINE 'groundtruth_keypoint_visibilities': [batch_size, num_boxes, num_keypoints]NEWLINE bool tensor with keypoint visibilities (Optional).NEWLINE 'groundtruth_labeled_classes': [batch_size, num_classes] int64 tensorNEWLINE of 1-indexed classes. 
(Optional)NEWLINE 'num_groundtruth_boxes': [batch_size] tensor containing the maximum numberNEWLINE of groundtruth boxes per image.NEWLINENEWLINE Raises:NEWLINE ValueError: if original_image_spatial_shape is not 2D int32 tensor of shapeNEWLINE [2].NEWLINE ValueError: if true_image_shapes is not 2D int32 tensor of shapeNEWLINE [3].NEWLINE """NEWLINE input_data_fields = fields.InputDataFieldsNEWLINE if original_image_spatial_shapes is None:NEWLINE original_image_spatial_shapes = tf.tile(NEWLINE tf.expand_dims(tf.shape(images)[1:3], axis=0),NEWLINE multiples=[tf.shape(images)[0], 1])NEWLINE else:NEWLINE if (len(original_image_spatial_shapes.shape) != 2 andNEWLINE original_image_spatial_shapes.shape[1] != 2):NEWLINE raise ValueError(NEWLINE '`original_image_spatial_shape` should be a 2D tensor of shape 'NEWLINE '[batch_size, 2].')NEWLINENEWLINE if true_image_shapes is None:NEWLINE true_image_shapes = tf.tile(NEWLINE tf.expand_dims(tf.shape(images)[1:4], axis=0),NEWLINE multiples=[tf.shape(images)[0], 1])NEWLINE else:NEWLINE if (len(true_image_shapes.shape) != 2NEWLINE and true_image_shapes.shape[1] != 3):NEWLINE raise ValueError('`true_image_shapes` should be a 2D tensor of 'NEWLINE 'shape [batch_size, 3].')NEWLINENEWLINE output_dict = {NEWLINE input_data_fields.original_image:NEWLINE images,NEWLINE input_data_fields.key:NEWLINE keys,NEWLINE input_data_fields.original_image_spatial_shape: (NEWLINE original_image_spatial_shapes),NEWLINE input_data_fields.true_image_shape:NEWLINE true_image_shapesNEWLINE }NEWLINENEWLINE detection_fields = fields.DetectionResultFieldsNEWLINE detection_boxes = detections[detection_fields.detection_boxes]NEWLINE detection_scores = detections[detection_fields.detection_scores]NEWLINE num_detections = tf.cast(detections[detection_fields.num_detections],NEWLINE dtype=tf.int32)NEWLINENEWLINE if class_agnostic:NEWLINE detection_classes = tf.ones_like(detection_scores, dtype=tf.int64)NEWLINE else:NEWLINE detection_classes = (NEWLINE 
tf.to_int64(detections[detection_fields.detection_classes]) +NEWLINE label_id_offset)NEWLINENEWLINE if scale_to_absolute:NEWLINE output_dict[detection_fields.detection_boxes] = (NEWLINE shape_utils.static_or_dynamic_map_fn(NEWLINE _scale_box_to_absolute,NEWLINE elems=[detection_boxes, original_image_spatial_shapes],NEWLINE dtype=tf.float32))NEWLINE else:NEWLINE output_dict[detection_fields.detection_boxes] = detection_boxesNEWLINE output_dict[detection_fields.detection_classes] = detection_classesNEWLINE output_dict[detection_fields.detection_scores] = detection_scoresNEWLINE output_dict[detection_fields.num_detections] = num_detectionsNEWLINENEWLINE if detection_fields.detection_weightPerObject in detections:NEWLINE output_dict[detection_fields.detection_weightPerObject] = detections[detection_fields.detection_weightPerObject]NEWLINENEWLINE if detection_fields.detection_masks in detections:NEWLINE detection_masks = detections[detection_fields.detection_masks]NEWLINE output_dict[detection_fields.detection_masks] = resize_detection_masks(NEWLINE detection_boxes, detection_masks, original_image_spatial_shapes)NEWLINENEWLINE if detection_fields.detection_surface_coords in detections:NEWLINE detection_surface_coords = detections[NEWLINE detection_fields.detection_surface_coords]NEWLINE output_dict[detection_fields.detection_surface_coords] = (NEWLINE shape_utils.static_or_dynamic_map_fn(NEWLINE _resize_surface_coordinate_masks,NEWLINE elems=[detection_boxes, detection_surface_coords,NEWLINE original_image_spatial_shapes],NEWLINE dtype=tf.float32))NEWLINENEWLINE if detection_fields.detection_keypoints in detections:NEWLINE detection_keypoints = detections[detection_fields.detection_keypoints]NEWLINE output_dict[detection_fields.detection_keypoints] = detection_keypointsNEWLINE if scale_to_absolute:NEWLINE output_dict[detection_fields.detection_keypoints] = (NEWLINE shape_utils.static_or_dynamic_map_fn(NEWLINE _scale_keypoint_to_absolute,NEWLINE 
elems=[detection_keypoints, original_image_spatial_shapes],NEWLINE dtype=tf.float32))NEWLINE if detection_fields.detection_keypoint_scores in detections:NEWLINE output_dict[detection_fields.detection_keypoint_scores] = detections[NEWLINE detection_fields.detection_keypoint_scores]NEWLINE else:NEWLINE output_dict[detection_fields.detection_keypoint_scores] = tf.ones_like(NEWLINE detections[detection_fields.detection_keypoints][:, :, :, 0])NEWLINENEWLINE if groundtruth:NEWLINE if max_gt_boxes is None:NEWLINE if input_data_fields.num_groundtruth_boxes in groundtruth:NEWLINE max_gt_boxes = groundtruth[input_data_fields.num_groundtruth_boxes]NEWLINE else:NEWLINE raise ValueError(NEWLINE 'max_gt_boxes must be provided when processing batched examples.')NEWLINENEWLINE if input_data_fields.groundtruth_instance_masks in groundtruth:NEWLINE masks = groundtruth[input_data_fields.groundtruth_instance_masks]NEWLINE max_spatial_shape = tf.reduce_max(NEWLINE original_image_spatial_shapes, axis=0, keep_dims=True)NEWLINE tiled_max_spatial_shape = tf.tile(NEWLINE max_spatial_shape,NEWLINE multiples=[tf.shape(original_image_spatial_shapes)[0], 1])NEWLINE groundtruth[input_data_fields.groundtruth_instance_masks] = (NEWLINE shape_utils.static_or_dynamic_map_fn(NEWLINE _resize_groundtruth_masks,NEWLINE elems=[masks, true_image_shapes,NEWLINE original_image_spatial_shapes,NEWLINE tiled_max_spatial_shape],NEWLINE dtype=tf.uint8))NEWLINENEWLINE output_dict.update(groundtruth)NEWLINENEWLINE image_shape = tf.cast(tf.shape(images), tf.float32)NEWLINE image_height, image_width = image_shape[1], image_shape[2]NEWLINENEWLINE def _scale_box_to_normalized_true_image(args):NEWLINE """Scale the box coordinates to be relative to the true image shape."""NEWLINE boxes, true_image_shape = argsNEWLINE true_image_shape = tf.cast(true_image_shape, tf.float32)NEWLINE true_height, true_width = true_image_shape[0], true_image_shape[1]NEWLINE normalized_window = tf.stack([0.0, 0.0, true_height / 
image_height,NEWLINE true_width / image_width])NEWLINE return box_list_ops.change_coordinate_frame(NEWLINE box_list.BoxList(boxes), normalized_window).get()NEWLINENEWLINE groundtruth_boxes = groundtruth[input_data_fields.groundtruth_boxes]NEWLINE groundtruth_boxes = shape_utils.static_or_dynamic_map_fn(NEWLINE _scale_box_to_normalized_true_image,NEWLINE elems=[groundtruth_boxes, true_image_shapes], dtype=tf.float32)NEWLINE output_dict[input_data_fields.groundtruth_boxes] = groundtruth_boxesNEWLINENEWLINE if input_data_fields.groundtruth_keypoints in groundtruth:NEWLINE # If groundtruth_keypoints is in the groundtruth dictionary. Update theNEWLINE # coordinates to conform with the true image shape.NEWLINE def _scale_keypoints_to_normalized_true_image(args):NEWLINE """Scale the box coordinates to be relative to the true image shape."""NEWLINE keypoints, true_image_shape = argsNEWLINE true_image_shape = tf.cast(true_image_shape, tf.float32)NEWLINE true_height, true_width = true_image_shape[0], true_image_shape[1]NEWLINE normalized_window = tf.stack(NEWLINE [0.0, 0.0, true_height / image_height, true_width / image_width])NEWLINE return keypoint_ops.change_coordinate_frame(keypoints,NEWLINE normalized_window)NEWLINENEWLINE groundtruth_keypoints = groundtruth[NEWLINE input_data_fields.groundtruth_keypoints]NEWLINE groundtruth_keypoints = shape_utils.static_or_dynamic_map_fn(NEWLINE _scale_keypoints_to_normalized_true_image,NEWLINE elems=[groundtruth_keypoints, true_image_shapes],NEWLINE dtype=tf.float32)NEWLINE output_dict[NEWLINE input_data_fields.groundtruth_keypoints] = groundtruth_keypointsNEWLINENEWLINE if scale_to_absolute:NEWLINE groundtruth_boxes = output_dict[input_data_fields.groundtruth_boxes]NEWLINE output_dict[input_data_fields.groundtruth_boxes] = (NEWLINE shape_utils.static_or_dynamic_map_fn(NEWLINE _scale_box_to_absolute,NEWLINE elems=[groundtruth_boxes, original_image_spatial_shapes],NEWLINE dtype=tf.float32))NEWLINE if 
input_data_fields.groundtruth_keypoints in groundtruth:
        groundtruth_keypoints = output_dict[
            input_data_fields.groundtruth_keypoints]
        output_dict[input_data_fields.groundtruth_keypoints] = (
            shape_utils.static_or_dynamic_map_fn(
                _scale_keypoint_to_absolute,
                elems=[groundtruth_keypoints, original_image_spatial_shapes],
                dtype=tf.float32))

    # For class-agnostic models, groundtruth classes all become 1.
    if class_agnostic:
      groundtruth_classes = groundtruth[input_data_fields.groundtruth_classes]
      groundtruth_classes = tf.ones_like(groundtruth_classes, dtype=tf.int64)
      output_dict[input_data_fields.groundtruth_classes] = groundtruth_classes

    output_dict[input_data_fields.num_groundtruth_boxes] = max_gt_boxes

  return output_dict


def get_evaluators(eval_config, categories, evaluator_options=None):
  """Returns the evaluator class according to eval_config, valid for categories.

  Args:
    eval_config: An `eval_pb2.EvalConfig`.
    categories: A list of dicts, each of which has the following keys -
      'id': (required) an integer id uniquely identifying this category.
      'name': (required) string representing category name e.g., 'cat', 'dog'.
      'keypoints': (optional) dict mapping this category's keypoints to unique
        ids.
    evaluator_options: A dictionary of metric names (see
      EVAL_METRICS_CLASS_DICT) to `DetectionEvaluator` initialization
      keyword arguments. For example:
      evaluator_options = {
        'coco_detection_metrics': {'include_metrics_per_category': True}
      }

  Returns:
    A list of instances of DetectionEvaluator.

  Raises:
    ValueError: if metric is not in the metric class dictionary.
  """
  evaluator_options = evaluator_options or {}
  eval_metric_fn_keys = eval_config.metrics_set
  # Fall back to the default metric when the config names none.
  if not eval_metric_fn_keys:
    eval_metric_fn_keys = [EVAL_DEFAULT_METRIC]
  evaluators_list = []
  for eval_metric_fn_key in eval_metric_fn_keys:
    if eval_metric_fn_key not in EVAL_METRICS_CLASS_DICT:
      raise ValueError('Metric not found: {}'.format(eval_metric_fn_key))
    # Per-metric constructor kwargs supplied by the caller (may be absent).
    kwargs_dict = (evaluator_options[eval_metric_fn_key] if eval_metric_fn_key
                   in evaluator_options else {})
    evaluators_list.append(EVAL_METRICS_CLASS_DICT[eval_metric_fn_key](
        categories,
        **kwargs_dict))

  # Parameterized (per-class keypoint) metrics are only defined on the full
  # EvalConfig proto, hence the isinstance guard.
  if isinstance(eval_config, eval_pb2.EvalConfig):
    parameterized_metrics = eval_config.parameterized_metric
    for parameterized_metric in parameterized_metrics:
      # NOTE(review): assert is stripped under `python -O`; this relies on the
      # proto oneof being set by a well-formed config.
      assert parameterized_metric.HasField('parameterized_metric')
      if parameterized_metric.WhichOneof(
          'parameterized_metric') == EVAL_KEYPOINT_METRIC:
        keypoint_metrics = parameterized_metric.coco_keypoint_metrics
        # Create category to keypoints mapping dict.
        category_keypoints = {}
        class_label = keypoint_metrics.class_label
        category = None
        for cat in categories:
          if cat['name'] == class_label:
            category = cat
            break
        # Silently skip metrics whose class label is not in `categories`.
        if not category:
          continue
        keypoints_for_this_class = category['keypoints']
        category_keypoints = [{
            'id': keypoints_for_this_class[kp_name], 'name': kp_name
        } for kp_name in keypoints_for_this_class]
        # Create keypoint evaluator for this category.
        evaluators_list.append(EVAL_METRICS_CLASS_DICT[EVAL_KEYPOINT_METRIC](
            category['id'], category_keypoints, class_label,
            keypoint_metrics.keypoint_label_to_sigmas))
  return evaluators_list


def get_eval_metric_ops_for_evaluators(eval_config,
                                       categories,
                                       eval_dict):
  """Returns eval metrics ops to use with `tf.estimator.EstimatorSpec`.

  Args:
    eval_config: An `eval_pb2.EvalConfig`.
    categories: A list of dicts, each of which has the following keys -
      'id': (required) an integer id uniquely identifying this category.
      'name': (required) string representing category name e.g., 'cat', 'dog'.
    eval_dict: An evaluation dictionary, returned from
      result_dict_for_single_example().

  Returns:
    A dictionary of metric names to tuple of value_op and update_op that can be
    used as eval metric ops in tf.EstimatorSpec.
  """
  eval_metric_ops = {}
  # Derive per-metric options from the config, then merge the estimator ops
  # contributed by every configured evaluator.
  evaluator_options = evaluator_options_from_eval_config(eval_config)
  evaluators_list = get_evaluators(eval_config, categories, evaluator_options)
  for evaluator in evaluators_list:
    eval_metric_ops.update(evaluator.get_estimator_eval_metric_ops(
        eval_dict))
  return eval_metric_ops


def evaluator_options_from_eval_config(eval_config):
  """Produces a dictionary of evaluation options for each eval metric.

  Args:
    eval_config: An `eval_pb2.EvalConfig`.

  Returns:
    evaluator_options: A dictionary of metric names (see
      EVAL_METRICS_CLASS_DICT) to `DetectionEvaluator` initialization
      keyword arguments. For example:
      evaluator_options = {
        'coco_detection_metrics': {'include_metrics_per_category': True}
      }
  """
  eval_metric_fn_keys = eval_config.metrics_set
  evaluator_options = {}
  for eval_metric_fn_key in eval_metric_fn_keys:
    # COCO/LVIS-style metrics share the per-category reporting options.
    if eval_metric_fn_key in (
        'coco_detection_metrics', 'coco_mask_metrics', 'lvis_mask_metrics'):
      evaluator_options[eval_metric_fn_key] = {
          'include_metrics_per_category': (
              eval_config.include_metrics_per_category)
      }

      # hasattr guards keep this working with older config protos that
      # predate these fields.
      if (hasattr(eval_config, 'all_metrics_per_category') and
          eval_config.all_metrics_per_category):
        evaluator_options[eval_metric_fn_key].update({
            'all_metrics_per_category': eval_config.all_metrics_per_category
        })
      # For coco detection eval, if the eval_config proto contains the
      # "skip_predictions_for_unlabeled_class" field, include this field in
      # evaluator_options.
      if eval_metric_fn_key == 'coco_detection_metrics' and hasattr(
          eval_config, 'skip_predictions_for_unlabeled_class'):
        evaluator_options[eval_metric_fn_key].update({
            'skip_predictions_for_unlabeled_class':
                (eval_config.skip_predictions_for_unlabeled_class)
        })
      # super_categories map a name to a comma-separated list of members.
      for super_category in eval_config.super_categories:
        if 'super_categories' not in evaluator_options[eval_metric_fn_key]:
          evaluator_options[eval_metric_fn_key]['super_categories'] = {}
        key = super_category
        value = eval_config.super_categories[key].split(',')
        evaluator_options[eval_metric_fn_key]['super_categories'][key] = value
      if eval_metric_fn_key == 'lvis_mask_metrics' and hasattr(
          eval_config, 'export_path'):
        evaluator_options[eval_metric_fn_key].update({
            'export_path': eval_config.export_path
        })

    elif eval_metric_fn_key == 'precision_at_recall_detection_metrics':
      evaluator_options[eval_metric_fn_key] = {
          'recall_lower_bound': (eval_config.recall_lower_bound),
          'recall_upper_bound': (eval_config.recall_upper_bound)
      }
  return evaluator_options


def has_densepose(eval_dict):
  """Returns True if eval_dict has both detection masks and surface coords."""
  return (fields.DetectionResultFields.detection_masks in eval_dict and
          fields.DetectionResultFields.detection_surface_coords in eval_dict)
# drawFace.pyw
# A program which draws faces
"""Write and test a function to meet this specification.

drawFace(center, size, win) center is a Point, size is an int, and win is a
GraphWin. Draws a simple face of the given size in win.

Your function can draw a simple smiley (or grim) face. Demonstrate the function
by writing a program that draws several faces of varying size in a single
window."""

from graphics import *

def drawFace(center, size, win):
    """Draw a simple face of the given size, centered at center, in win.

    center -- graphics.Point marking the middle of the head
    size   -- radius of the head; every feature is scaled from it
    win    -- graphics.GraphWin to draw into
    """
    x = center.getX()
    y = center.getY()

    # Head: a filled circle of radius `size`.
    head = Circle(Point(x, y), size)
    head.setOutline("peachpuff")
    head.setFill("peachpuff")
    head.draw(win)

    # Eyes: small filled circles; the right eye is the left one shifted
    # by `size`, so the pair straddles the vertical center line.
    leftEye = Circle(Point((x - (1/2) * size), (y + (1/2) * size)), (size/10))
    leftEye.setOutline("black")
    leftEye.setFill("black")
    leftEye.draw(win)

    rightEye = leftEye.clone()
    rightEye.move(size, 0)
    rightEye.draw(win)

    # Nose: two line segments forming a "<" shape left of center.
    upperNose = Line(Point(x - 1/10 * size, y + size * (1/3)),
                     Point(x - (1/2) * size, y))
    upperNose.draw(win)

    lowerNose = Line(Point(x - (1/2) * size, y),
                     Point(x - 1/10 * size, y - size * (1/3)))
    lowerNose.draw(win)

    # Mouth: reuse the eye shape, moved below the center.
    mouth = leftEye.clone()
    mouth.move(1/2 * size, -size)
    mouth.draw(win)

def properties(win):
    """Ask the user for a face center (mouse click) and a size (typed input).

    Returns a (center, size) tuple: a graphics.Point and a float.
    """
    # Get where the user wants to center the face
    print("\nPlease click where the center of the face should be.")
    center = win.getMouse()

    # Get the size of the face you want to draw
    sizeFace = float(input("\nPlease enter the size of the face you want \
to draw. "))

    return center, sizeFace

def main():
    """Open a window sized by the user and draw two faces in it."""
    # Get the dimensions of the window
    winHeight = float(input("Please enter the height of the window you want: "))
    winWidth = float(input("Please enter the width of the window you want: "))
    # BUG FIX: GraphWin's signature is (title, width, height); the original
    # passed winHeight first, so non-square windows came out transposed.
    win = GraphWin("Draw a Face", winWidth, winHeight)

    # Draw two faces, each at a user-chosen position and size.
    center, size = properties(win)
    drawFace(center, size, win)

    center, size = properties(win)
    drawFace(center, size, win)

    # The original expression winHeight - (winHeight - winHeight/20) reduces
    # to winHeight/20: place the prompt near the top, horizontally centered.
    message = Text(Point(winWidth/2, winHeight/20), "Click anywhere to quit.")
    message.draw(win)
    win.getMouse()
    win.close()

main()
import logging

import numpy
from numpy.linalg import pinv

from colormath import color_constants

logger = logging.getLogger(__name__)


def _resolve_white_point(wp, observer):
    """Return a white-point triple, resolving illuminant-name strings.

    Strings are looked up (case-insensitively) in
    ``color_constants.ILLUMINANTS`` for the given observer angle; anything
    else is assumed to already be an XYZ-like iterable and is returned
    unchanged.
    """
    if isinstance(wp, str):
        return color_constants.ILLUMINANTS[observer][wp.lower()]
    return wp


# noinspection PyPep8Naming
def _get_adaptation_matrix(wp_src, wp_dst, observer, adaptation):
    """
    Calculate the correct transformation matrix based on origin and target
    illuminants. The observer angle must be the same between illuminants.

    See colormath.color_constants.ADAPTATION_MATRICES for a list of possible
    adaptations.

    Detailed conversion documentation is available at:
    http://brucelindbloom.com/Eqn_ChromAdapt.html
    """
    # Get the appropriate transformation matrix, [MsubA].
    m_sharp = color_constants.ADAPTATION_MATRICES[adaptation]

    # White-points may arrive as illuminant names or as XYZ triples
    # (the original `elif hasattr(..., '__iter__')` branches were no-ops).
    wp_src = _resolve_white_point(wp_src, observer)
    wp_dst = _resolve_white_point(wp_dst, observer)

    # Sharpened cone responses ~ rho gamma beta ~ sharpened r g b
    rgb_src = numpy.dot(m_sharp, wp_src)
    rgb_dst = numpy.dot(m_sharp, wp_dst)

    # Ratio of whitepoint sharpened responses
    m_rat = numpy.diag(rgb_dst / rgb_src)

    # Final transformation matrix
    m_xfm = numpy.dot(numpy.dot(pinv(m_sharp), m_rat), m_sharp)

    return m_xfm


# noinspection PyPep8Naming
def apply_chromatic_adaptation(val_x, val_y, val_z, orig_illum, targ_illum,
                               observer='2', adaptation='bradford'):
    """
    Applies a chromatic adaptation matrix to convert XYZ values between
    illuminants. It is important to recognize that color transformation results
    in color errors, determined by how far the original illuminant is from the
    target illuminant. For example, D65 to A could result in very high maximum
    deviance.

    An informative article with estimate average Delta E values for each
    illuminant conversion may be found at:

    http://brucelindbloom.com/ChromAdaptEval.html
    """
    # It's silly to have to do this, but some people may want to call this
    # function directly, so we'll protect them from messing up upper/lower case.
    adaptation = adaptation.lower()

    # Get white-points for illuminant (names or XYZ triples are accepted).
    wp_src = _resolve_white_point(orig_illum, observer)
    wp_dst = _resolve_white_point(targ_illum, observer)

    # Raw string: "\*" is an invalid escape sequence in a normal literal.
    logger.debug(r" \* Applying adaptation matrix: %s", adaptation)
    # Retrieve the appropriate transformation matrix from the constants.
    transform_matrix = _get_adaptation_matrix(wp_src, wp_dst,
                                              observer, adaptation)

    # Stuff the XYZ values into a NumPy matrix for conversion.
    XYZ_matrix = numpy.array((val_x, val_y, val_z))
    # Perform the adaptation via matrix multiplication.
    result_matrix = numpy.dot(transform_matrix, XYZ_matrix)

    # Return individual X, Y, and Z coordinates.
    return result_matrix[0], result_matrix[1], result_matrix[2]


# noinspection PyPep8Naming
def apply_chromatic_adaptation_on_color(color, targ_illum, adaptation='bradford'):
    """
    Convenience function to apply an adaptation directly to a Color object.

    Mutates `color` in place (XYZ coordinates and illuminant) and also
    returns it.
    """
    xyz_x = color.xyz_x
    xyz_y = color.xyz_y
    xyz_z = color.xyz_z
    orig_illum = color.illuminant
    targ_illum = targ_illum.lower()
    observer = color.observer
    adaptation = adaptation.lower()

    # Overwrite the color's X, Y, and Z coordinates with the adapted values.
    color.xyz_x, color.xyz_y, color.xyz_z = apply_chromatic_adaptation(
        xyz_x, xyz_y, xyz_z, orig_illum, targ_illum,
        observer=observer, adaptation=adaptation)
    color.set_illuminant(targ_illum)

    return color
import purerpc
import greeter_pb2


class GreeterServicer(purerpc.Servicer):
    """Server-side skeleton for the Greeter service; subclasses override the
    four RPC handlers below."""

    async def SayHello(self, input_message):
        raise NotImplementedError()

    async def SayHelloGoodbye(self, input_message):
        raise NotImplementedError()

    async def SayHelloToMany(self, input_messages):
        raise NotImplementedError()

    async def SayHelloToManyAtOnce(self, input_messages):
        raise NotImplementedError()

    @property
    def service(self) -> purerpc.Service:
        """Build the purerpc Service with every RPC registered."""
        # All four methods share the HelloRequest/HelloReply message pair and
        # differ only in cardinality, so register them from a table.
        service_obj = purerpc.Service(
            "Greeter"
        )
        method_table = (
            ("SayHello", self.SayHello,
             purerpc.Cardinality.UNARY_UNARY),
            ("SayHelloGoodbye", self.SayHelloGoodbye,
             purerpc.Cardinality.UNARY_STREAM),
            ("SayHelloToMany", self.SayHelloToMany,
             purerpc.Cardinality.STREAM_STREAM),
            ("SayHelloToManyAtOnce", self.SayHelloToManyAtOnce,
             purerpc.Cardinality.STREAM_UNARY),
        )
        for rpc_name, handler, cardinality in method_table:
            signature = purerpc.RPCSignature(
                cardinality,
                greeter_pb2.HelloRequest,
                greeter_pb2.HelloReply,
            )
            service_obj.add_method(rpc_name, handler, signature)
        return service_obj


class GreeterStub:
    """Client-side stub exposing one awaitable attribute per Greeter RPC."""

    def __init__(self, channel):
        self._client = purerpc.Client(
            "Greeter",
            channel
        )
        # Same message pair everywhere; only the cardinality varies.
        stub_table = (
            ("SayHello", purerpc.Cardinality.UNARY_UNARY),
            ("SayHelloGoodbye", purerpc.Cardinality.UNARY_STREAM),
            ("SayHelloToMany", purerpc.Cardinality.STREAM_STREAM),
            ("SayHelloToManyAtOnce", purerpc.Cardinality.STREAM_UNARY),
        )
        for rpc_name, cardinality in stub_table:
            method_stub = self._client.get_method_stub(
                rpc_name,
                purerpc.RPCSignature(
                    cardinality,
                    greeter_pb2.HelloRequest,
                    greeter_pb2.HelloReply,
                ),
            )
            setattr(self, rpc_name, method_stub)
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################


# OpenERP addon manifest for the "Recurring Documents" (subscription) module.
{
    'name': 'Recurring Documents',
    'version': '1.0',
    'category': 'Tools',
    # Shown on the module description page; reStructuredText-style markup.
    'description': """
Create recurring documents.
===========================

This module allows to create new documents and add subscriptions on that document.

e.g. To have an invoice generated automatically periodically:
-------------------------------------------------------------
    * Define a document type based on Invoice object
    * Define a subscription whose source document is the document defined as
        above. Specify the interval information and partner to be invoice.
    """,
    'author': 'OpenERP SA',
    # Only the core 'base' module is required.
    'depends': ['base'],
    # Record rules / ACLs, then the module's views, loaded on install.
    'data': ['security/subcription_security.xml', 'security/ir.model.access.csv', 'subscription_view.xml'],
    # Demonstration records, loaded only when demo data is enabled.
    'demo': ['subscription_demo.xml',],
    'installable': True,
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import cv2
from IPython import display
from PIL import Image


def cv2_imshow(a):
  """A replacement for cv2.imshow() for use in Jupyter notebooks.

  Args:
    a : np.ndarray. shape (N, M) or (N, M, 1) is an NxM grayscale image. shape
      (N, M, 3) is an NxM BGR color image. shape (N, M, 4) is an NxM BGRA color
      image.
  """
  # Clamp to the displayable range and drop to 8-bit before rendering.
  a = a.clip(0, 255).astype('uint8')
  # cv2 stores colors as BGR; convert to RGB(A) so PIL renders them correctly.
  if a.ndim == 3:
    conversion = cv2.COLOR_BGRA2RGBA if a.shape[2] == 4 else cv2.COLOR_BGR2RGB
    a = cv2.cvtColor(a, conversion)
  display.display(Image.fromarray(a))
# Copyright 2019 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Parallel-compile tests for P.EmbeddingLookup under various shard strategies."""
import numpy as np

import mindspore as ms
import mindspore.nn as nn
from mindspore.common.api import _executor
from mindspore.ops import operations as P
from mindspore.ops import composite as C
from mindspore import Tensor, context
from tests.ut.python.ops.test_math_ops import VirtualLoss


# Gradient operator that returns gradients w.r.t. all inputs.
grad_all = C.GradOperation(get_all=True)


class GradWrap(nn.Cell):
    """Wraps a network so that construct() returns its input gradients."""
    def __init__(self, network):
        super(GradWrap, self).__init__()
        self.network = network

    def construct(self, x, y):
        return grad_all(self.network)(x, y)

class NetWithLoss(nn.Cell):
    """Attaches a VirtualLoss head to a network for compile tests."""
    def __init__(self, network):
        super(NetWithLoss, self).__init__()
        self.loss = VirtualLoss()
        self.network = network

    def construct(self, x, y):
        predict = self.network(x, y)
        return self.loss(predict)

class Net(nn.Cell):
    """EmbeddingLookup (with optional shard strategy/target) followed by BatchMatMul."""
    def __init__(self, shape, offset, strategy1=None, strategy2=None, target="Device"):
        super().__init__()
        # Constant int32 index tensor of the given shape.
        self.index = Tensor(np.ones(shape), dtype=ms.int32)
        self.offset = offset
        # primitive_target pins where the lookup runs (e.g. "CPU" vs "Device").
        self.elu = P.EmbeddingLookup().shard(strategy1).add_prim_attr("primitive_target", target)
        self.mm = P.BatchMatMul().shard(strategy2)

    def construct(self, x, y):
        out = self.elu(x, self.index, self.offset)
        out = self.mm(out, y)
        return out


# NOTE(review): the reducescatter_false/true test pairs below have identical
# bodies -- presumably a config flag distinguishing them was dropped; verify.
def test_embeddinglookup_reducescatter_false():
    shape = [8, 8]
    offset = 8
    net = NetWithLoss(Net(shape, offset))
    net.set_auto_parallel()

    x = Tensor(np.ones([64, 32]), dtype=ms.float32)
    y = Tensor(np.ones([8, 32, 8]), dtype=ms.float32)
    net.set_train()
    _executor.compile(net, x, y)


def test_embeddinglookup_reducescatter_true():
    shape = [8, 8]
    offset = 8
    net = NetWithLoss(Net(shape, offset))
    net.set_auto_parallel()

    x = Tensor(np.ones([64, 32]), dtype=ms.float32)
    y = Tensor(np.ones([8, 32, 8]), dtype=ms.float32)
    net.set_train()
    _executor.compile(net, x, y)


def test_embeddinglookup_reducescatter_false_grad():
    shape = [8, 8]
    offset = 8
    net = GradWrap(NetWithLoss(Net(shape, offset)))
    net.set_auto_parallel()

    x = Tensor(np.ones([64, 32]), dtype=ms.float32)
    y = Tensor(np.ones([8, 32, 8]), dtype=ms.float32)
    net.set_train()
    _executor.compile(net, x, y)


def test_embeddinglookup_reducescatter_true_grad():
    context.set_context(save_graphs=False)
    shape = [8, 8]
    offset = 8
    net = GradWrap(NetWithLoss(Net(shape, offset)))
    net.set_auto_parallel()

    x = Tensor(np.ones([64, 32]), dtype=ms.float32)
    y = Tensor(np.ones([8, 32, 8]), dtype=ms.float32)
    net.set_train()
    _executor.compile(net, x, y)


def test_embeddinglookup_semi_auto1():
    # Shard the lookup over the first axis, with the lookup pinned to CPU.
    context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
    shape = [64, 32]
    offset = 0
    strategy1 = ((8, 1), (1, 1))
    strategy2 = ((4, 1, 2), (4, 2, 1))
    net = GradWrap(NetWithLoss(Net(shape, offset, strategy1, strategy2, "CPU")))

    net.set_auto_parallel()
    x = Tensor(np.ones([64, 64]), dtype=ms.float32)
    y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
    net.set_train()
    _executor.compile(net, x, y)


def test_embeddinglookup_semi_auto2():
    # Same as semi_auto1 but sharding the lookup over the second axis.
    context.set_auto_parallel_context(device_num=8, global_rank=0, parallel_mode="semi_auto_parallel")
    shape = [64, 32]
    offset = 0
    strategy1 = ((1, 8), (1, 1))
    strategy2 = ((4, 1, 2), (4, 2, 1))
    net = GradWrap(NetWithLoss(Net(shape, offset, strategy1, strategy2, "CPU")))

    net.set_auto_parallel()
    x = Tensor(np.ones([64, 64]), dtype=ms.float32)
    y = Tensor(np.ones([64, 64, 64]), dtype=ms.float32)
    net.set_train()
    _executor.compile(net, x, y)
from ingest.api.ingestapi import IngestApiNEWLINEfrom ingest.exporter.bundle import BundleService, BundleNEWLINEfrom ingest.exporter.metadata import MetadataServiceNEWLINEfrom ingest.exporter.staging import StagingServiceNEWLINENEWLINENEWLINEclass SubmissionEnvelopeParseException(Exception):NEWLINE passNEWLINENEWLINENEWLINEclass SubmissionEnvelope:NEWLINENEWLINE def __init__(self, uuid, staging_area_uuid):NEWLINE self.uuid = uuidNEWLINE self.staging_area_uuid = staging_area_uuidNEWLINENEWLINE @staticmethodNEWLINE def from_dict(source: dict):NEWLINE try:NEWLINE uuid = source['uuid']['uuid']NEWLINE staging_area_uuid = source['stagingDetails']['stagingAreaUuid']['uuid']NEWLINE return SubmissionEnvelope(uuid, staging_area_uuid)NEWLINE except (KeyError, TypeError) as e:NEWLINE raise SubmissionEnvelopeParseException(e)NEWLINENEWLINENEWLINEclass Exporter:NEWLINENEWLINE def __init__(self, ingest_api: IngestApi, metadata_service: MetadataService,NEWLINE staging_service: StagingService, bundle_service: BundleService):NEWLINE self.ingest_api = ingest_apiNEWLINE self.metadata_service = metadata_serviceNEWLINE self.staging_service = staging_serviceNEWLINE self.bundle_service = bundle_serviceNEWLINENEWLINE def export_update(self, submission_source: dict, bundle_uuid: str, metadata_urls: list,NEWLINE update_version: str):NEWLINE bundle = self.bundle_service.fetch(bundle_uuid)NEWLINE submission = SubmissionEnvelope.from_dict(submission_source)NEWLINE staging_details = self._apply_metadata_updates(bundle, metadata_urls,NEWLINE submission.staging_area_uuid)NEWLINE bundle.update_version(update_version)NEWLINE self.bundle_service.update(bundle, staging_details)NEWLINE manifest = bundle.generate_manifest(submission.uuid)NEWLINE self.ingest_api.create_bundle_manifest(manifest)NEWLINENEWLINE def _apply_metadata_updates(self, bundle: Bundle, metadata_urls, staging_area_uuid):NEWLINE staging_details = []NEWLINE for url in metadata_urls:NEWLINE metadata_resource = 
self.metadata_service.fetch_resource(url)NEWLINE staging_info = self.staging_service.stage_metadata(staging_area_uuid, metadata_resource)NEWLINE staging_details.append(staging_info)NEWLINE bundle.update_file(metadata_resource)NEWLINE return staging_detailsNEWLINE
## This file is part of ScapyNEWLINE## See http://www.secdev.org/projects/scapy for more informationsNEWLINE## Copyright (C) Philippe Biondi <phil@secdev.org>NEWLINE## Copyright (C) Gabriel Potter <gabriel@potter.fr>NEWLINE## This program is published under a GPLv2 licenseNEWLINENEWLINE"""NEWLINEPython 2 and 3 link classes.NEWLINE"""NEWLINENEWLINEfrom __future__ import absolute_importNEWLINEimport base64NEWLINEimport binasciiNEWLINENEWLINEimport scapy.modules.six as sixNEWLINENEWLINE###########NEWLINE# Python3 #NEWLINE###########NEWLINENEWLINEdef cmp_to_key(mycmp):NEWLINE # TODO remove me once all 'key=cmp_to_key(..)' has been fixed in utils6.py, automaton.pyNEWLINE """Convert a cmp= function into a key= function.NEWLINE To use with sort()NEWLINENEWLINE e.g: def stg_cmp(a, b):NEWLINE return a == bNEWLINE list.sort(key=cmp_to_key(stg_cmp))NEWLINE """NEWLINE class K(object):NEWLINE def __init__(self, obj, *args):NEWLINE self.obj = objNEWLINE def __lt__(self, other):NEWLINE return mycmp(self.obj, other.obj) < 0NEWLINE def __gt__(self, other):NEWLINE return mycmp(self.obj, other.obj) > 0NEWLINE def __eq__(self, other):NEWLINE return mycmp(self.obj, other.obj) == 0NEWLINE def __le__(self, other):NEWLINE return mycmp(self.obj, other.obj) <= 0 NEWLINE def __ge__(self, other):NEWLINE return mycmp(self.obj, other.obj) >= 0NEWLINE def __ne__(self, other):NEWLINE return mycmp(self.obj, other.obj) != 0NEWLINE return KNEWLINENEWLINEdef cmp(a, b):NEWLINE """Old Python 2 function"""NEWLINE return (a > b) - (a < b)NEWLINENEWLINENEWLINEif six.PY2:NEWLINE def orb(x):NEWLINE """Return ord(x) when necessary."""NEWLINE if isinstance(x, basestring):NEWLINE return ord(x)NEWLINE return xNEWLINEelse:NEWLINE def orb(x):NEWLINE """Return ord(x) when necessary."""NEWLINE if isinstance(x, (bytes, str)):NEWLINE return ord(x)NEWLINE return xNEWLINENEWLINENEWLINEif six.PY2:NEWLINE def raw(x):NEWLINE """Convert a str, a packet to bytes"""NEWLINE if x is None:NEWLINE return NoneNEWLINE if 
hasattr(x, "__bytes__"):NEWLINE return x.__bytes__()NEWLINE try:NEWLINE return chr(x)NEWLINE except (ValueError, TypeError):NEWLINE return str(x)NEWLINENEWLINE def plain_str(x):NEWLINE """Convert basic byte objects to str"""NEWLINE return x if isinstance(x, basestring) else str(x)NEWLINENEWLINE def chb(x):NEWLINE """Same than chr() but encode as bytes.NEWLINENEWLINE """NEWLINE if isinstance(x, bytes):NEWLINE return xNEWLINE else:NEWLINE if hasattr(x, "__int__") and not isinstance(x, int):NEWLINE return bytes(chr(int(x)))NEWLINE return bytes(chr(x))NEWLINEelse:NEWLINE def raw(x):NEWLINE """Convert a str, an int, a list of ints, a packet to bytes"""NEWLINE try:NEWLINE return bytes(x)NEWLINE except TypeError:NEWLINE return bytes(x, encoding="utf8")NEWLINENEWLINE def plain_str(x):NEWLINE """Convert basic byte objects to str"""NEWLINE if isinstance(x, bytes):NEWLINE return x.decode('utf8')NEWLINE return x if isinstance(x, str) else str(x)NEWLINENEWLINE def chb(x):NEWLINE """Same than chr() but encode as bytes.NEWLINENEWLINE """NEWLINE if isinstance(x, bytes):NEWLINE return xNEWLINE else:NEWLINE if hasattr(x, "__int__") and not isinstance(x, int):NEWLINE return bytes([int(x)])NEWLINE return bytes([x])NEWLINENEWLINEdef bytes_hex(x):NEWLINE """Hexify a str or a bytes object"""NEWLINE return binascii.b2a_hex(raw(x))NEWLINENEWLINEdef hex_bytes(x):NEWLINE """De-hexify a str or a byte object"""NEWLINE return binascii.a2b_hex(raw(x))NEWLINENEWLINEdef base64_bytes(x):NEWLINE """Turn base64 into bytes"""NEWLINE if six.PY2:NEWLINE return base64.decodestring(x)NEWLINE return base64.decodebytes(raw(x))NEWLINENEWLINEdef bytes_base64(x):NEWLINE """Turn bytes into base64"""NEWLINE if six.PY2:NEWLINE return base64.encodestring(x).replace('\n', '')NEWLINE return base64.encodebytes(raw(x)).replace(b'\n', b'')NEWLINE
# !PythonNEWLINENEWLINE# Chapter 4 practice projectsNEWLINENEWLINEgrid = [['.','.','.','.','.','.'],NEWLINE ['.','0','0','.','.','.'],NEWLINE ['0','0','0','0','.','.'],NEWLINE ['0','0','0','0','0','.'],NEWLINE ['.','0','0','0','0','0'],NEWLINE ['0','0','0','0','0','.'],NEWLINE ['0','0','0','0','.','.'],NEWLINE ['.','0','0','.','.','.'],NEWLINE ['.','.','.','.','.','.']]NEWLINENEWLINEfor x in range(len(grid[0])):NEWLINE print()NEWLINE for y in range(len(grid)):NEWLINE print(grid[y][x], end="")NEWLINE
# coding=utf-8NEWLINE# --------------------------------------------------------------------------NEWLINE# Copyright (c) Microsoft Corporation. All rights reserved.NEWLINE# Licensed under the MIT License. See License.txt in the project root forNEWLINE# license information.NEWLINE#NEWLINE# Code generated by Microsoft (R) AutoRest Code Generator.NEWLINE# Changes may cause incorrect behavior and will be lost if the code isNEWLINE# regenerated.NEWLINE# --------------------------------------------------------------------------NEWLINENEWLINEfrom .azure_entity_resource import AzureEntityResourceNEWLINENEWLINENEWLINEclass ImmutabilityPolicy(AzureEntityResource):NEWLINE """The ImmutabilityPolicy property of a blob container, including Id, resourceNEWLINE name, resource type, Etag.NEWLINENEWLINE Variables are only populated by the server, and will be ignored whenNEWLINE sending a request.NEWLINENEWLINE All required parameters must be populated in order to send to Azure.NEWLINENEWLINE :ivar id: Fully qualified resource Id for the resource. Ex -NEWLINE /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{resourceType}/{resourceName}NEWLINE :vartype id: strNEWLINE :ivar name: The name of the resourceNEWLINE :vartype name: strNEWLINE :ivar type: The type of the resource. Ex-NEWLINE Microsoft.Compute/virtualMachines or Microsoft.Storage/storageAccounts.NEWLINE :vartype type: strNEWLINE :ivar etag: Resource Etag.NEWLINE :vartype etag: strNEWLINE :param immutability_period_since_creation_in_days: Required. TheNEWLINE immutability period for the blobs in the container since the policyNEWLINE creation, in days.NEWLINE :type immutability_period_since_creation_in_days: intNEWLINE :ivar state: The ImmutabilityPolicy state of a blob container, possibleNEWLINE values include: Locked and Unlocked. 
Possible values include: 'Locked',NEWLINE 'Unlocked'NEWLINE :vartype state: str orNEWLINE ~azure.mgmt.storage.v2018_11_01.models.ImmutabilityPolicyStateNEWLINE """NEWLINENEWLINE _validation = {NEWLINE 'id': {'readonly': True},NEWLINE 'name': {'readonly': True},NEWLINE 'type': {'readonly': True},NEWLINE 'etag': {'readonly': True},NEWLINE 'immutability_period_since_creation_in_days': {'required': True},NEWLINE 'state': {'readonly': True},NEWLINE }NEWLINENEWLINE _attribute_map = {NEWLINE 'id': {'key': 'id', 'type': 'str'},NEWLINE 'name': {'key': 'name', 'type': 'str'},NEWLINE 'type': {'key': 'type', 'type': 'str'},NEWLINE 'etag': {'key': 'etag', 'type': 'str'},NEWLINE 'immutability_period_since_creation_in_days': {'key': 'properties.immutabilityPeriodSinceCreationInDays', 'type': 'int'},NEWLINE 'state': {'key': 'properties.state', 'type': 'str'},NEWLINE }NEWLINENEWLINE def __init__(self, **kwargs):NEWLINE super(ImmutabilityPolicy, self).__init__(**kwargs)NEWLINE self.immutability_period_since_creation_in_days = kwargs.get('immutability_period_since_creation_in_days', None)NEWLINE self.state = NoneNEWLINE
from kivy.app import AppNEWLINEfrom kivy.uix.boxlayout import BoxLayoutNEWLINEfrom kivy.uix.gridlayout import GridLayoutNEWLINEfrom kivy.uix.button import ButtonNEWLINEfrom kivy.uix.label import LabelNEWLINEfrom kivy.uix.textinput import TextInputNEWLINEfrom kivy.uix.widget import WidgetNEWLINENEWLINEfrom ExerciseRunning import *NEWLINENEWLINEfrom kivy.graphics import *NEWLINEfrom kivy.core.text import Label as CoreLabelNEWLINE#https://groups.google.com/forum/#!topic/kivy-users/zRCjfhBcX4cNEWLINENEWLINEclass ExerciseRunningStatisticsWidget( GridLayout ):NEWLINE def __init__( self, exercise_sets_reps_weights_name,NEWLINE journal, **kwargs ):NEWLINE super( ExerciseRunningStatisticsWidget,NEWLINE self ).__init__( **kwargs )NEWLINE self.cols = 1NEWLINE self.spacing = 1NEWLINE self.exercise_name = exercise_sets_reps_weights_nameNEWLINE excercise_label = Label(NEWLINE text = "(ExerciseRunning, default plot type)",NEWLINE size_hint_y = 0.1 )NEWLINE self.add_widget( excercise_label )NEWLINE self.drawing_widget = Widget()NEWLINE self.add_widget( self.drawing_widget )NEWLINE self.drawing_widget.bind( size = self.update_drawing )NEWLINE NEWLINE def update_drawing(self, *args):NEWLINE self.drawing_widget.canvas.clear()NEWLINE self.drawing_widget.canvas.add( Color( 1, 1, 1) )NEWLINE self.drawing_widget.bg_rect = Rectangle(NEWLINE pos = (0, 0),NEWLINE size = ( self.drawing_widget.width,NEWLINE self.drawing_widget.height ) )NEWLINE self.drawing_widget.canvas.add( self.drawing_widget.bg_rect )NEWLINE journal = App.get_running_app().journalNEWLINE drawing_instructions = \NEWLINE ExerciseRunningStatisticsWidget.gen_drawing_instructions(NEWLINE self.exercise_name, journal,NEWLINE self.drawing_widget.bg_rect.size )NEWLINE self.drawing_widget.canvas.add( drawing_instructions )NEWLINE NEWLINE @classmethodNEWLINE def gen_drawing_instructions( cls, ex_name, journal, rect_size ):NEWLINE plot = InstructionGroup()NEWLINE # testNEWLINE plot.add( Color( 1, 1, 0) )NEWLINE plot.add( Rectangle( 
pos = (0, 0), size = ( 100, 100 ) ) )NEWLINE #NEWLINE axes_offsets, axes_sizes, axes_instr = \NEWLINE cls.axes_instructions( rect_size )NEWLINE plot.add( axes_instr )NEWLINE dates_exces = cls.get_dates_exercises_pairs( ex_name, journal )NEWLINE ticks_instr = cls.ticks_instructions( axes_offsets,NEWLINE axes_sizes,NEWLINE dates_exces )NEWLINE plot.add( ticks_instr )NEWLINE plot.add( cls.plot_dists_times( dates_exces,NEWLINE axes_offsets,NEWLINE axes_sizes ) )NEWLINE return plotNEWLINENEWLINE @classmethodNEWLINE def axes_instructions( cls, rect_size ):NEWLINE axes = InstructionGroup()NEWLINE offset_x = 0.05 * rect_size[0]NEWLINE offset_y = 0.1 * rect_size[1]NEWLINE line_width = 2NEWLINE axes.add( Color( 0, 0, 0) )NEWLINE axes.add( Line(NEWLINE points = [ offset_x, offset_y,NEWLINE offset_x, rect_size[1] - offset_y ],NEWLINE width = line_width ) )NEWLINE axes.add( Line(NEWLINE points = [ offset_x, offset_y,NEWLINE rect_size[0] - offset_x, offset_y ],NEWLINE width = line_width ) )NEWLINE return ( ( offset_x, offset_y ),NEWLINE ( rect_size[0] - 2 * offset_x,NEWLINE rect_size[1] - 2 * offset_y ),NEWLINE axes )NEWLINENEWLINE @classmethodNEWLINE def ticks_instructions( cls, axes_offsets, axes_sizes, dates_exces ):NEWLINE x_ticks = InstructionGroup()NEWLINE ticks_len = 5NEWLINE if len( dates_exces ) != 0:NEWLINE x_ticks_distance = axes_sizes[0] / len( dates_exces )NEWLINE else:NEWLINE x_ticks_distance = 0NEWLINE yyyy_mm_dd = [ x.split(' ')[0] for (x, y) in dates_exces ]NEWLINE for i, d in enumerate( yyyy_mm_dd ):NEWLINE x_ticks.add(NEWLINE Line( points =NEWLINE [ axes_offsets[0] + ( i + 1 ) * x_ticks_distance,NEWLINE axes_offsets[1],NEWLINE axes_offsets[0] + ( i + 1 ) * x_ticks_distance,NEWLINE axes_offsets[1] - ticks_len ],NEWLINE width = 3 ) )NEWLINE text_label = CoreLabel( text=d, font_size = 15 )NEWLINE text_label.refresh()NEWLINE texture = text_label.textureNEWLINE texture_size = list( texture.size )NEWLINE x_ticks.add( Rectangle(NEWLINE texture = texture,NEWLINE size 
= texture_size,NEWLINE pos = (NEWLINE axes_offsets[0] + ( i + 1 ) * x_ticks_distance - 45,NEWLINE axes_offsets[1] - ticks_len - 25 )))NEWLINE return x_ticksNEWLINE NEWLINE NEWLINE @classmethodNEWLINE def get_dates_exercises_pairs( cls, ex_name, journal ):NEWLINE dates_exces = []NEWLINE # move to Journal class?NEWLINE for tr in journal.trainings:NEWLINE for ex in tr.exercises:NEWLINE if ex_name == ex.description.get( 'name' ):NEWLINE dates_exces.append( NEWLINE ( tr.description.get( "start_time" ), ex ) )NEWLINE return dates_excesNEWLINE NEWLINENEWLINE @classmethodNEWLINE def plot_dists_times( cls, dates_exces, axes_offset, axes_size ):NEWLINE dists_times_instr = InstructionGroup()NEWLINE if len( dates_exces ) != 0:NEWLINE distance_between_centers = axes_size[0] / len( dates_exces )NEWLINE else:NEWLINE distance_between_centers = 0NEWLINE max_total = 0NEWLINE for d, ex in dates_exces:NEWLINE # move to sep functionNEWLINE ex_total = 0NEWLINE dists = ex.description.get("distances")NEWLINE for dist in dists:NEWLINE try:NEWLINE ex_total = ex_total + float(dist)NEWLINE except ValueError:NEWLINE ex_total = ex_total + 0NEWLINE if ex_total > max_total:NEWLINE max_total = ex_totalNEWLINE if max_total != 0:NEWLINE y_distance = axes_size[1] / ( max_total + 1 )NEWLINE else:NEWLINE y_distance = 0NEWLINE for i, (d, ex) in enumerate( dates_exces ):NEWLINE distances = ex.description.get("distances")NEWLINE times = ex.description.get("times")NEWLINE float_dists = []NEWLINE for dist in distances:NEWLINE try:NEWLINE float_dists.append( float(dist) )NEWLINE except ValueError:NEWLINE float_dists.append( 0 )NEWLINE for f_d, d in enumerate( float_dists ):NEWLINE y_pos_top = axes_offset[1] + \NEWLINE sum( float_dists[0:f_d+1] ) * y_distanceNEWLINE y_pos_bottom = axes_offset[1] + \NEWLINE sum( float_dists[0:f_d] ) * y_distanceNEWLINE x_center_pos = \NEWLINE axes_offset[0] + distance_between_centers * (i + 1)NEWLINE x_size = 10NEWLINE y_size = y_pos_top - y_pos_bottomNEWLINE 
dists_times_instr.add(NEWLINE Line( points = [ x_center_pos - 5, y_pos_top,NEWLINE x_center_pos + 5, y_pos_top ],NEWLINE width = 3 ) )NEWLINE text_label = CoreLabel( text = str(d), font_size = 15 )NEWLINE text_label.refresh()NEWLINE texture = text_label.textureNEWLINE texture_size = list( texture.size )NEWLINE dists_times_instr.add( Rectangle(NEWLINE texture = texture,NEWLINE size = texture_size,NEWLINE pos = (NEWLINE x_center_pos - 10,NEWLINE y_pos_bottom + (y_pos_top - y_pos_bottom) / 2 )))NEWLINE return dists_times_instrNEWLINE
from __future__ import print_functionNEWLINENEWLINEimport osNEWLINEimport astNEWLINEimport jsonNEWLINEimport globNEWLINEimport reNEWLINEimport sysNEWLINEimport demisto_clientNEWLINEfrom threading import Thread, LockNEWLINEfrom demisto_client.demisto_api.rest import ApiExceptionNEWLINEfrom demisto_sdk.commands.common.tools import run_threads_listNEWLINENEWLINEfrom google.cloud.storage import BucketNEWLINEfrom packaging.version import VersionNEWLINEfrom typing import ListNEWLINENEWLINEfrom Tests.Marketplace.marketplace_services import init_storage_client, Pack, load_jsonNEWLINEfrom Tests.Marketplace.upload_packs import download_and_extract_indexNEWLINEfrom Tests.Marketplace.marketplace_constants import GCPConfig, PACKS_FULL_PATH, IGNORED_FILES, PACKS_FOLDER, MetadataNEWLINEfrom Tests.scripts.utils.content_packs_util import is_pack_deprecatedNEWLINEfrom Tests.scripts.utils import logging_wrapper as loggingNEWLINENEWLINEPACK_METADATA_FILE = 'pack_metadata.json'NEWLINEPACK_PATH_VERSION_REGEX = re.compile(fr'^{GCPConfig.PRODUCTION_STORAGE_BASE_PATH}/[A-Za-z0-9-_.]+/(\d+\.\d+\.\d+)/[A-Za-z0-9-_.]'NEWLINE r'+\.zip$')NEWLINESUCCESS_FLAG = TrueNEWLINENEWLINENEWLINEdef get_pack_id_from_error_with_gcp_path(error: str) -> str:NEWLINE """NEWLINE Gets the id of the pack from the pack's path in GCP that is mentioned in the error msg.NEWLINE Args:NEWLINE error: path of pack in GCP.NEWLINENEWLINE Returns:NEWLINE The id of given pack.NEWLINE """NEWLINE return error.split('/packs/')[1].split('.zip')[0].split('/')[0]NEWLINENEWLINENEWLINEdef get_pack_display_name(pack_id: str) -> str:NEWLINE """NEWLINE Gets the display name of the pack from the pack ID.NEWLINENEWLINE :param pack_id: ID of the pack.NEWLINE :return: Name found in the pack metadata, otherwise an empty string.NEWLINE """NEWLINE metadata_path = os.path.join(PACKS_FULL_PATH, pack_id, PACK_METADATA_FILE)NEWLINE if pack_id and os.path.isfile(metadata_path):NEWLINE with open(metadata_path, 'r') as json_file:NEWLINE pack_metadata 
= json.load(json_file)NEWLINE return pack_metadata.get('name')NEWLINE return ''NEWLINENEWLINENEWLINEdef is_pack_hidden(pack_id: str) -> bool:NEWLINE """NEWLINE Check if the given pack is deprecated.NEWLINENEWLINE :param pack_id: ID of the pack.NEWLINE :return: True if the pack is deprecated, i.e. has 'hidden: true' field, False otherwise.NEWLINE """NEWLINE metadata_path = os.path.join(PACKS_FULL_PATH, pack_id, PACK_METADATA_FILE)NEWLINE if pack_id and os.path.isfile(metadata_path):NEWLINE with open(metadata_path, 'r') as json_file:NEWLINE pack_metadata = json.load(json_file)NEWLINE return pack_metadata.get('hidden', False)NEWLINE else:NEWLINE logging.warning(f'Could not open metadata file of pack {pack_id}')NEWLINE return FalseNEWLINENEWLINENEWLINEdef create_dependencies_data_structure(response_data: dict, dependants_ids: list, dependencies_data: list,NEWLINE checked_packs: list):NEWLINE """ Recursively creates the packs' dependencies data structure for the installation requestsNEWLINE (only required and uninstalled).NEWLINENEWLINE Args:NEWLINE response_data (dict): The GET /search/dependencies response data.NEWLINE dependants_ids (list): A list of the dependant packs IDs.NEWLINE dependencies_data (list): The dependencies data structure to be created.NEWLINE checked_packs (list): Required dependants that were already found.NEWLINE """NEWLINENEWLINE next_call_dependants_ids = []NEWLINENEWLINE for dependency in response_data:NEWLINE dependants = dependency.get('dependants', {})NEWLINE for dependant in dependants.keys():NEWLINE is_required = dependants[dependant].get('level', '') == 'required'NEWLINE if dependant in dependants_ids and is_required and dependency.get('id') not in checked_packs:NEWLINE dependencies_data.append({NEWLINE 'id': dependency.get('id'),NEWLINE 'version': dependency.get('extras', {}).get('pack', {}).get('currentVersion')NEWLINE })NEWLINE next_call_dependants_ids.append(dependency.get('id'))NEWLINE 
checked_packs.append(dependency.get('id'))NEWLINENEWLINE if next_call_dependants_ids:NEWLINE create_dependencies_data_structure(response_data, next_call_dependants_ids, dependencies_data, checked_packs)NEWLINENEWLINENEWLINEdef get_pack_dependencies(client: demisto_client, pack_data: dict, lock: Lock):NEWLINE """ Get the pack's required dependencies.NEWLINENEWLINE Args:NEWLINE client (demisto_client): The configured client to use.NEWLINE pack_data (dict): Contains the pack ID and version.NEWLINE lock (Lock): A lock object.NEWLINE Returns:NEWLINE (list) The pack's dependencies.NEWLINE """NEWLINE pack_id = pack_data['id']NEWLINE logging.debug(f'Getting dependencies for pack {pack_id}')NEWLINE try:NEWLINE response_data, status_code, _ = demisto_client.generic_request_func(NEWLINE client,NEWLINE path='/contentpacks/marketplace/search/dependencies',NEWLINE method='POST',NEWLINE body=[pack_data],NEWLINE accept='application/json',NEWLINE _request_timeout=NoneNEWLINE )NEWLINENEWLINE if 200 <= status_code < 300:NEWLINE dependencies_data: list = []NEWLINE dependants_ids = [pack_id]NEWLINE reseponse_data = ast.literal_eval(response_data).get('dependencies', [])NEWLINE create_dependencies_data_structure(reseponse_data, dependants_ids, dependencies_data, dependants_ids)NEWLINE dependencies_str = ', '.join([dep['id'] for dep in dependencies_data])NEWLINE if dependencies_data:NEWLINE logging.debug(f'Found the following dependencies for pack {pack_id}: {dependencies_str}')NEWLINE return dependencies_dataNEWLINE if status_code == 400:NEWLINE logging.error(f'Unable to find dependencies for {pack_id}.')NEWLINE return []NEWLINE else:NEWLINE result_object = ast.literal_eval(response_data)NEWLINE msg = result_object.get('message', '')NEWLINE raise Exception(f'Failed to get pack {pack_id} dependencies - with status code {status_code}\n{msg}\n')NEWLINE except Exception:NEWLINE logging.exception(f'The request to get pack {pack_id} dependencies has failed.')NEWLINENEWLINE 
lock.acquire()NEWLINE global SUCCESS_FLAGNEWLINE SUCCESS_FLAG = FalseNEWLINE lock.release()NEWLINENEWLINENEWLINEdef search_pack(client: demisto_client,NEWLINE pack_display_name: str,NEWLINE pack_id: str,NEWLINE lock: Lock) -> dict:NEWLINE """ Make a pack search request.NEWLINENEWLINE Args:NEWLINE client (demisto_client): The configured client to use.NEWLINE pack_display_name (string): The pack display name.NEWLINE pack_id (string): The pack ID.NEWLINE lock (Lock): A lock object.NEWLINE Returns:NEWLINE (dict): Returns the pack data if found, or empty dict otherwise.NEWLINE """NEWLINENEWLINE try:NEWLINE # make the search requestNEWLINE response_data, status_code, _ = demisto_client.generic_request_func(client,NEWLINE path=f'/contentpacks/marketplace/{pack_id}',NEWLINE method='GET',NEWLINE accept='application/json',NEWLINE _request_timeout=None)NEWLINENEWLINE if 200 <= status_code < 300:NEWLINE result_object = ast.literal_eval(response_data)NEWLINENEWLINE if result_object and result_object.get('currentVersion'):NEWLINE logging.debug(f'Found pack "{pack_display_name}" by its ID "{pack_id}" in bucket!')NEWLINENEWLINE pack_data = {NEWLINE 'id': result_object.get('id'),NEWLINE 'version': result_object.get('currentVersion')NEWLINE }NEWLINE return pack_dataNEWLINENEWLINE else:NEWLINE raise Exception(f'Did not find pack "{pack_display_name}" by its ID "{pack_id}" in bucket.')NEWLINE else:NEWLINE result_object = ast.literal_eval(response_data)NEWLINE msg = result_object.get('message', '')NEWLINE err_msg = f'Search request for pack "{pack_display_name}" with ID "{pack_id}", failed with status code ' \NEWLINE f'{status_code}\n{msg}'NEWLINE raise Exception(err_msg)NEWLINE except Exception:NEWLINE logging.exception(f'Search request for pack "{pack_display_name}" with ID "{pack_id}", failed.')NEWLINENEWLINE lock.acquire()NEWLINE global SUCCESS_FLAGNEWLINE SUCCESS_FLAG = FalseNEWLINE lock.release()NEWLINE return {}NEWLINENEWLINENEWLINEdef find_malformed_pack_id(body: str) -> 
List:NEWLINE """NEWLINE Find the pack ID from the installation error message in the case the error is that the pack is not found orNEWLINE in case that the error is that the pack's version is invalid.NEWLINE Args:NEWLINE body (str): The response message of the failed installation pack.NEWLINENEWLINE Returns: list of malformed ids (list)NEWLINENEWLINE """NEWLINE malformed_ids = []NEWLINE if body:NEWLINE response_info = json.loads(body)NEWLINE if error_info := response_info.get('error'):NEWLINE errors_info = [error_info]NEWLINE else:NEWLINE # the error is returned as a list of errorNEWLINE errors_info = response_info.get('errors', [])NEWLINE for error in errors_info:NEWLINE if 'pack id: ' in error:NEWLINE malformed_ids.extend(error.split('pack id: ')[1].replace(']', '').replace('[', '').replace(NEWLINE ' ', '').split(','))NEWLINE else:NEWLINE malformed_pack_pattern = re.compile(r'invalid version [0-9.]+ for pack with ID ([\w_-]+)')NEWLINE malformed_pack_id = malformed_pack_pattern.findall(str(error))NEWLINE if malformed_pack_id and error:NEWLINE malformed_ids.extend(malformed_pack_id)NEWLINE return malformed_idsNEWLINENEWLINENEWLINEdef handle_malformed_pack_ids(malformed_pack_ids, packs_to_install):NEWLINE """NEWLINE Handles the case where the malformed id failed the installation but it was not a part of the initial installaion.NEWLINE This is in order to prevent an infinite loop for this such edge case.NEWLINE Args:NEWLINE malformed_pack_ids: the ids found from the error msgNEWLINE packs_to_install: list of packs that was already installed that caused the failure.NEWLINENEWLINE Returns:NEWLINE raises an error.NEWLINE """NEWLINE for malformed_pack_id in malformed_pack_ids:NEWLINE if malformed_pack_id not in {pack['id'] for pack in packs_to_install}:NEWLINE raise Exception(f'The pack {malformed_pack_id} has failed to install even 'NEWLINE f'though it was not in the installation list')NEWLINENEWLINENEWLINEdef install_packs_from_artifacts(client: demisto_client, host: 
str, test_pack_path: str, pack_ids_to_install: List):NEWLINE """NEWLINE Installs all the packs located in the artifacts folder of the BitHub actions build. Please note:NEWLINE The server always returns a 200 status even if the pack was not installed.NEWLINENEWLINE :param client: Demisto-py client to connect to the server.NEWLINE :param host: FQDN of the server.NEWLINE :param test_pack_path: Path the the test pack directory.NEWLINE :param pack_ids_to_install: List of pack IDs to install.NEWLINE :return: None. Call to server waits until a successful response.NEWLINE """NEWLINE logging.info(f"Test pack path is: {test_pack_path}")NEWLINE logging.info(f"Pack IDs to install are: {pack_ids_to_install}")NEWLINENEWLINE local_packs = glob.glob(f"{test_pack_path}/*.zip")NEWLINENEWLINE for local_pack in local_packs:NEWLINE if any(pack_id in local_pack for pack_id in pack_ids_to_install):NEWLINE logging.info(f'Installing the following pack: {local_pack}')NEWLINE upload_zipped_packs(client=client, host=host, pack_path=local_pack)NEWLINENEWLINENEWLINEdef install_packs_private(client: demisto_client,NEWLINE host: str,NEWLINE pack_ids_to_install: List,NEWLINE test_pack_path: str):NEWLINE """ Make a packs installation request.NEWLINENEWLINE Args:NEWLINE client (demisto_client): The configured client to use.NEWLINE host (str): The server URL.NEWLINE pack_ids_to_install (list): List of Pack IDs to install.NEWLINE test_pack_path (str): Path where test packs are located.NEWLINE """NEWLINE install_packs_from_artifacts(client,NEWLINE host,NEWLINE pack_ids_to_install=pack_ids_to_install,NEWLINE test_pack_path=test_pack_path)NEWLINENEWLINENEWLINEdef install_packs(client: demisto_client,NEWLINE host: str,NEWLINE packs_to_install: list,NEWLINE request_timeout: int = 999999,NEWLINE ):NEWLINE """ Make a packs installation request.NEWLINE If a pack fails to install due to malformed pack, this function catches the corrupted pack and call anotherNEWLINE request to install packs again, this time 
without the corrupted pack.NEWLINE If a pack fails to install due to timeout when sending a request to GCP,NEWLINE request to install all packs again once more.NEWLINENEWLINE Args:NEWLINE client (demisto_client): The configured client to use.NEWLINE host (str): The server URL.NEWLINE packs_to_install (list): A list of the packs to install.NEWLINE request_timeout (int): Timeout settings for the installation request.NEWLINE """NEWLINENEWLINE class GCPTimeOutException(ApiException):NEWLINE def __init__(self, error):NEWLINE if '/packs/' in error:NEWLINE self.pack_id = get_pack_id_from_error_with_gcp_path(error)NEWLINE super().__init__()NEWLINENEWLINE class MalformedPackException(ApiException):NEWLINE def __init__(self, pack_ids):NEWLINE self.malformed_ids = pack_idsNEWLINE super().__init__()NEWLINENEWLINE class GeneralItemNotFoundError(ApiException):NEWLINE def __init__(self, error_msg):NEWLINE self.error_msg = error_msgNEWLINE super().__init__()NEWLINENEWLINE def call_install_packs_request(packs):NEWLINE try:NEWLINE logging.debug(f'Installing the following packs on server {host}:\n{[pack["id"] for pack in packs]}')NEWLINE response_data, status_code, _ = demisto_client.generic_request_func(client,NEWLINE path='/contentpacks/marketplace/install',NEWLINE method='POST',NEWLINE body={'packs': packs,NEWLINE 'ignoreWarnings': True},NEWLINE accept='application/json',NEWLINE _request_timeout=request_timeout)NEWLINENEWLINE if status_code in range(200, 300) and status_code != 204:NEWLINE packs_data = [{'ID': pack.get('id'), 'CurrentVersion': pack.get('currentVersion')} for pack inNEWLINE ast.literal_eval(response_data)]NEWLINE logging.success(f'Packs were successfully installed on server {host}')NEWLINE logging.debug(f'The packs that were successfully installed on server {host}:\n{packs_data}')NEWLINENEWLINE except ApiException as ex:NEWLINE if 'timeout awaiting response' in ex.body:NEWLINE raise GCPTimeOutException(ex.body)NEWLINE if malformed_ids := 
find_malformed_pack_id(ex.body):
                raise MalformedPackException(malformed_ids)
            if 'Item not found' in ex.body:
                raise GeneralItemNotFoundError(ex.body)
            raise ex

    try:
        logging.info(f'Installing packs on server {host}')
        try:
            call_install_packs_request(packs_to_install)

        except MalformedPackException as e:
            # if this is malformed pack error, remove malformed packs and retry until success
            handle_malformed_pack_ids(e.malformed_ids, packs_to_install)
            logging.warning(f'The request to install packs on server {host} has failed, retrying without packs '
                            f'{e.malformed_ids}')
            return install_packs(client, host, [pack for pack in packs_to_install if pack['id'] not in e.malformed_ids],
                                 request_timeout)

        except GCPTimeOutException as e:
            # if this is a gcp timeout, try only once more
            logging.warning(f'The request to install packs on server {host} has failed due to timeout awaiting response'
                            f' headers while trying to install pack {e.pack_id}, trying again for one more time')
            call_install_packs_request(packs_to_install)

        except GeneralItemNotFoundError as e:
            logging.warning(f'The request to install all packs on server {host} has failed due to an item not found '
                            f'error, with the message: {e.error_msg}.\n trying again for one more time')
            call_install_packs_request(packs_to_install)

        except Exception as e:
            # Any other failure is recorded in the module-level flag instead of propagating.
            logging.exception(f'The request to install packs has failed. Additional info: {str(e)}')
            global SUCCESS_FLAG
            SUCCESS_FLAG = False

    finally:
        # NOTE(review): a `return` inside `finally` swallows any exception that is
        # still propagating at this point; success/failure is reported solely via
        # the module-level SUCCESS_FLAG.
        return SUCCESS_FLAG


def search_pack_and_its_dependencies(client: demisto_client,
                                     pack_id: str,
                                     packs_to_install: list,
                                     installation_request_body: list,
                                     lock: Lock):
    """ Searches for the pack of the specified file path, as well as its dependencies,
    and updates the list of packs to be installed accordingly.

    Args:
        client (demisto_client): The configured client to use.
        pack_id (str): The id of the pack to be installed.
        packs_to_install (list) A list of the packs to be installed in this iteration.
        installation_request_body (list): A list of packs to be installed, in the request format.
        lock (Lock): A lock object.
    """
    pack_data = {}
    if pack_id not in packs_to_install:
        pack_display_name = get_pack_display_name(pack_id)
        if pack_display_name:
            pack_data = search_pack(client, pack_display_name, pack_id, lock)
        # Fall back to a default 1.0.0 request entry when the search yielded nothing.
        if pack_data is None:
            pack_data = {
                'id': pack_id,
                'version': '1.0.0'
            }

    if pack_data:
        dependencies = get_pack_dependencies(client, pack_data, lock)

        current_packs_to_install = [pack_data]
        if dependencies:
            # Check that the dependencies don't include a deprecated pack:
            for dependency in dependencies:
                pack_path = os.path.join(PACKS_FOLDER, dependency.get('id'))
                if is_pack_deprecated(pack_path):
                    logging.critical(f'Pack {pack_id} depends on pack {dependency.get("id")} which is a deprecated '
                                     f'pack.')
                    global SUCCESS_FLAG
                    SUCCESS_FLAG = False
                else:
                    current_packs_to_install.extend(dependencies)

        # This function runs on worker threads; guard the shared lists.
        lock.acquire()
        for pack in current_packs_to_install:
            if pack['id'] not in packs_to_install:
                packs_to_install.append(pack['id'])
                installation_request_body.append(pack)
        lock.release()


def get_latest_version_from_bucket(pack_id: str, production_bucket: Bucket) -> str:
    """ Retrieves the latest version of pack in the bucket

    Args:
        pack_id (str): The pack id to retrieve the latest version
        production_bucket (Bucket): The GCS production bucket

    Returns: The latest version of the pack as it is in the production bucket

    """
    pack_bucket_path = os.path.join(GCPConfig.PRODUCTION_STORAGE_BASE_PATH, pack_id)
    logging.debug(f'Trying to get latest version for pack {pack_id} from bucket path {pack_bucket_path}')
    # Adding the '/' in the end of the prefix to search for the exact pack id
    pack_versions_paths = [f.name for f in production_bucket.list_blobs(prefix=f'{pack_bucket_path}/') if
                           f.name.endswith('.zip')]

    # Extract the semantic version embedded in each zip path and keep the maximum.
    pack_versions = []
    for path in pack_versions_paths:
        versions = PACK_PATH_VERSION_REGEX.findall(path)
        if not versions:
            continue
        pack_versions.append(Version(versions[0]))

    logging.debug(f'Found the following zips for {pack_id} pack: {pack_versions}')
    if pack_versions:
        pack_latest_version = str(max(pack_versions))
        return pack_latest_version
    else:
        logging.error(f'Could not find any versions for pack {pack_id} in bucket path {pack_bucket_path}')
        return ''


def get_pack_installation_request_data(pack_id: str, pack_version: str):
    """
    Returns the installation request data of a given pack and its version. The request must have the ID and Version.

    :param pack_id: Id of the pack to add.
    :param pack_version: Version of the pack to add.
    :return: The request data part of the pack
    """
    return {
        'id': pack_id,
        'version': pack_version
    }


def install_all_content_packs_for_nightly(client: demisto_client, host: str, service_account: str):
    """ Iterates over the packs currently located in the Packs directory. Wrapper for install_packs.
    Retrieving the latest version of each pack from the production bucket.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param service_account: The full path to the service account json.
    :return: None. Prints the response from the server in the build.
    """
    all_packs = []

    # Initiate the GCS client and get the production bucket
    storage_client = init_storage_client(service_account)
    production_bucket = storage_client.bucket(GCPConfig.PRODUCTION_BUCKET)
    logging.debug(f"Installing all content packs for nightly flow in server {host}")

    # Add deprecated packs to IGNORED_FILES list:
    for pack_id in os.listdir(PACKS_FULL_PATH):
        if is_pack_hidden(pack_id):
            logging.debug(f'Skipping installation of hidden pack "{pack_id}"')
            IGNORED_FILES.append(pack_id)

    for pack_id in os.listdir(PACKS_FULL_PATH):
        if pack_id not in IGNORED_FILES:
            pack_version = get_latest_version_from_bucket(pack_id, production_bucket)
            if pack_version:
                all_packs.append(get_pack_installation_request_data(pack_id, pack_version))
    install_packs(client, host, all_packs)


def install_all_content_packs_from_build_bucket(client: demisto_client, host: str, server_version: str,
                                                bucket_packs_root_path: str, service_account: str,
                                                extract_destination_path: str):
    """ Iterates over the packs currently located in the Build bucket. Wrapper for install_packs.
    Retrieving the metadata of the latest version of each pack from the index.zip of the build bucket.

    :param client: Demisto-py client to connect to the server.
    :param host: FQDN of the server.
    :param server_version: The version of the server the packs are installed on.
    :param bucket_packs_root_path: The prefix to the root of packs in the bucket
    :param service_account: Google Service Account
    :param extract_destination_path: the full path of extract folder for the index.
    :return: None. Prints the response from the server in the build.
    """
    all_packs = []
    logging.debug(f"Installing all content packs in server {host} from packs path {bucket_packs_root_path}")

    storage_client = init_storage_client(service_account)
    build_bucket = storage_client.bucket(GCPConfig.CI_BUILD_BUCKET)
    index_folder_path, _, _ = download_and_extract_index(build_bucket, extract_destination_path, bucket_packs_root_path)

    for pack_id in os.listdir(index_folder_path):
        if os.path.isdir(os.path.join(index_folder_path, pack_id)):
            metadata_path = os.path.join(index_folder_path, pack_id, Pack.METADATA)
            pack_metadata = load_json(metadata_path)
            if 'partnerId' in pack_metadata:  # not installing private packs
                continue
            pack_version = pack_metadata.get(Metadata.CURRENT_VERSION, Metadata.SERVER_DEFAULT_MIN_VERSION)
            server_min_version = pack_metadata.get(Metadata.SERVER_MIN_VERSION, Metadata.SERVER_DEFAULT_MIN_VERSION)
            hidden = pack_metadata.get(Metadata.HIDDEN, False)
            # Check if the server version is greater than the minimum server version required for this pack or if the
            # pack is hidden (deprecated):
            if ('Master' in server_version or Version(server_version) >= Version(server_min_version)) and \
                    not hidden:
                logging.debug(f"Appending pack id {pack_id}")
                all_packs.append(get_pack_installation_request_data(pack_id, pack_version))
            else:
                reason = 'Is hidden' if hidden else f'min server version is {server_min_version}'
                logging.debug(f'Pack: {pack_id} with version: {pack_version} will not be installed on {host}. '
                              f'Pack {reason}.')
    return install_packs(client, host, all_packs)


def upload_zipped_packs(client: demisto_client,
                        host: str,
                        pack_path: str):
    """ Install packs from zip file.

    Args:
        client (demisto_client): The configured client to use.
        host (str): The server URL.
        pack_path (str): path to pack zip.
    """
    header_params = {
        'Content-Type': 'multipart/form-data'
    }
    auth_settings = ['api_key', 'csrf_token', 'x-xdr-auth-id']
    file_path = os.path.abspath(pack_path)
    files = {'file': file_path}

    logging.info(f'Making "POST" request to server {host} - to install all packs from file {pack_path}')

    # make the pack installation request
    try:
        response_data, status_code, _ = client.api_client.call_api(resource_path='/contentpacks/installed/upload',
                                                                   method='POST',
                                                                   auth_settings=auth_settings,
                                                                   header_params=header_params, files=files)

        if 200 <= status_code < 300:
            logging.info(f'All packs from file {pack_path} were successfully installed on server {host}')
        else:
            # Non-2xx responses carry a Python-literal body with a 'message' field.
            result_object = ast.literal_eval(response_data)
            message = result_object.get('message', '')
            raise Exception(f'Failed to install packs - with status code {status_code}\n{message}')
    except Exception:
        # NOTE(review): exits the whole process on any failure here.
        logging.exception('The request to install packs has failed.')
        sys.exit(1)


def search_and_install_packs_and_their_dependencies_private(test_pack_path: str,
                                                            pack_ids: list,
                                                            client: demisto_client):
    """ Searches for the packs from the specified list, searches their dependencies, and then installs them.
    Args:
        test_pack_path (str): Path of where the test packs are located.
        pack_ids (list): A list of the pack ids to search and install.
        client (demisto_client): The client to connect to.

    Returns (list, bool):
        A list of the installed packs' ids, or an empty list if is_nightly == True.
        A flag that indicates if the operation succeeded or not.
    """
    host = client.api_client.configuration.host

    logging.info(f'Starting to search and install packs in server: {host}')

    install_packs_private(client, host, pack_ids, test_pack_path)

    return SUCCESS_FLAG


def search_and_install_packs_and_their_dependencies(pack_ids: list,
                                                    client: demisto_client, hostname: str = ''):
    """ Searches for the packs from the specified list, searches their dependencies, and then
    installs them.
    Args:
        pack_ids (list): A list of the pack ids to search and install.
        client (demisto_client): The client to connect to.
        hostname (str): Hostname of instance. Using for logs.

    Returns (list, bool):
        A list of the installed packs' ids, or an empty list if is_nightly == True.
        A flag that indicates if the operation succeeded or not.
    """
    host = hostname if hostname else client.api_client.configuration.host

    logging.info(f'Starting to search and install packs in server: {host}')

    packs_to_install: list = []  # we save all the packs we want to install, to avoid duplications
    installation_request_body: list = []  # the packs to install, in the request format

    threads_list = []
    lock = Lock()

    # One worker thread per requested pack; results accumulate in the shared lists.
    for pack_id in pack_ids:
        thread = Thread(target=search_pack_and_its_dependencies,
                        kwargs={'client': client,
                                'pack_id': pack_id,
                                'packs_to_install': packs_to_install,
                                'installation_request_body': installation_request_body,
                                'lock': lock})
        threads_list.append(thread)
    run_threads_list(threads_list)

    install_packs(client, host, installation_request_body)

    return packs_to_install, SUCCESS_FLAG
# Copyright (C) 2013 Jaedyn K. Draper
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

"""
**Provides info about the terminal the makefile's being run in**
"""

import platform
import sys

if platform.system() == "Windows":
    import ctypes
    import struct
else:
    import curses


class TermColor(object):
    """
    Abstracts color in a cross-platform way. Values and types will differ based on platform:
    on Windows these are console attribute bit masks, elsewhere they are ANSI SGR code strings.
    """
    if platform.system() == "Windows":
        # Windows console attribute bits: 1=blue, 2=green, 4=red, 8=intensity.
        DGREY = 0 | 8
        RED = 4 | 8
        GREEN = 2 | 8
        YELLOW = 2 | 4 | 8
        BLUE = 1 | 8
        MAGENTA = 1 | 4 | 8
        CYAN = 1 | 2 | 8
        WHITE = 1 | 2 | 4 | 8
        BLACK = 0
        DRED = 4
        DGREEN = 2
        DYELLOW = 2 | 4
        DBLUE = 1
        DMAGENTA = 1 | 4
        DCYAN = 1 | 2
        LGREY = 1 | 2 | 4
    else:
        # ANSI SGR parameters: "1;3x" is bold/bright, "22;3x" is normal intensity.
        DGREY = "1;30"
        RED = "1;31"
        GREEN = "1;32"
        YELLOW = "1;33"
        BLUE = "1;34"
        MAGENTA = "1;35"
        CYAN = "1;36"
        WHITE = "1;37"
        BLACK = "22;30"
        DRED = "22;31"
        DGREEN = "22;32"
        DYELLOW = "22;33"
        DBLUE = "22;34"
        DMAGENTA = "22;35"
        DCYAN = "22;36"
        LGREY = "22;37"


class TermInfo(object):
    """
    Provides access to cross-platform methods of getting terminal info and interacting with
    colored output.
    """
    @staticmethod
    def ResetColor():
        """
        Reset the color of the terminal to its default value
        """
        if platform.system() == "Windows":
            # -11 = STD_OUTPUT_HANDLE
            ctypes.windll.kernel32.SetConsoleTextAttribute(ctypes.windll.kernel32.GetStdHandle(-11), TermInfo._reset)
        else:
            sys.stdout.write("\033[0m")


    @staticmethod
    def SetColor(color):
        """
        Set the color of the terminal

        :param color: The desired color
        :type color: TermColor value
        """
        if platform.system() == "Windows":
            ctypes.windll.kernel32.SetConsoleTextAttribute(ctypes.windll.kernel32.GetStdHandle(-11), color)
        else:
            sys.stdout.write("\033[{}m".format(color))


    @staticmethod
    def GetNumColumns():
        """
        Retrieve the current column count for this terminal

        :return: Number of columns, or 0 if it cannot be determined
        :rtype: int
        """
        if platform.system() == "Windows":
            csbi = ctypes.create_string_buffer(22)
            res = ctypes.windll.kernel32.GetConsoleScreenBufferInfo(ctypes.windll.kernel32.GetStdHandle(-11), csbi)
            if res:
                (bufx, bufy, curx, cury, wattr, left, top, right, bottom, maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
                # srWindow coordinates are inclusive, so the visible width is
                # right - left + 1 (the previous `right - left` was off by one).
                return right - left + 1
            else:
                return 0
        else:
            if TermInfo.cursesValid:
                return curses.tigetnum('cols')
            else:
                return 0


    @staticmethod
    def SupportsColor():
        """
        Check whether the active terminal supports colors.

        :return: Whether or not color is supported
        :rtype: bool
        """
        if platform.system() == "Windows":
            return TermInfo._color_supported
        else:
            if TermInfo.cursesValid:
                return (curses.tigetnum("colors") >= 8)
            else:
                return False


    @staticmethod
    def GetDefaultColor():
        """
        Get the default color for this terminal

        :return: The default color
        :rtype: TermColor value
        """
        if platform.system() == "Windows":
            # Based on IPython's winconsole.py, written by Alexander Belchenko
            csbi = ctypes.create_string_buffer(22)
            res = ctypes.windll.kernel32.GetConsoleScreenBufferInfo(ctypes.windll.kernel32.GetStdHandle(-11), csbi)
            assert res

            (bufx, bufy, curx, cury, wattr,
             left, top, right, bottom, maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
            return wattr
        else:
            return "0"


# Module-level initialization: probe the console once and cache the result
# as class attributes on TermInfo.
if platform.system() == "Windows":
    # -11 = STD_OUTPUT_HANDLE
    try:
        TermInfo._reset = TermInfo.GetDefaultColor()
    except Exception:
        TermInfo._color_supported = False
    else:
        TermInfo._color_supported = True
else:
    try:
        curses.setupterm()
    except Exception:
        TermInfo.cursesValid = False
    else:
        TermInfo.cursesValid = True
# Stub module: importing it deliberately fails until the plugin is implemented.
raise NotImplementedError('plugin not implemented')
from typing import List

from ..metadata import ShardMetadata

def _check_shard_metadata_pair_overlap(shard1: ShardMetadata, shard2: ShardMetadata):
    """
    Return True if the regions described by the two shards intersect.

    Two n-dimensional boxes overlap only if they overlap along every
    dimension; if either shard begins at or beyond the other's end on some
    axis, the pair is disjoint.
    """
    ndims = len(shard1.shard_offsets)
    for dim in range(ndims):
        first_starts_after_second = \
            shard1.shard_offsets[dim] >= shard2.shard_offsets[dim] + shard2.shard_sizes[dim]
        second_starts_after_first = \
            shard2.shard_offsets[dim] >= shard1.shard_offsets[dim] + shard1.shard_sizes[dim]
        if first_starts_after_second or second_starts_after_first:
            return False

    return True

def validate_non_overlapping_shards_metadata(shards: List[ShardMetadata]):
    """
    Verify that no two shards in the list overlap.

    Args:
        shards(List[ShardMetadata]): List of :class:`ShardMetadata` objects representing
            each shard.
    Raises:
        ``ValueError`` if there's overlap in any two shards.
    """
    # TODO: evaluate optimizing this if needed.
    # Pairwise comparison of every unordered shard pair.
    for i, first in enumerate(shards):
        for second in shards[i + 1:]:
            if _check_shard_metadata_pair_overlap(first, second):
                raise ValueError(f'Shards {first} and {second} overlap')


def check_tensor(shards_metadata, tensor_dims) -> None:
    """
    Verify that the given shards tile the tensor of the given dimensions exactly.

    Args:
        shards_metadata(List[ShardMetadata]): List of :class:`ShardMetadata`
            objects representing each shard of the tensor.
        tensor_dims(Sequence of int): Dimensions of tensor to verify
    Raises:
        ``ValueError`` if not compatible.
    """
    # The sharding is compatible when every shard fits inside the tensor's
    # bounds and the shard volumes sum to the tensor volume (overlap has
    # already been ruled out elsewhere, so equal volume implies full cover).
    tensor_rank = len(tensor_dims)
    shards_rank = len(shards_metadata[0].shard_offsets)
    if tensor_rank != shards_rank:
        raise ValueError(f'Rank of tensor is {tensor_rank}, but shards rank is {shards_rank}')

    total_shard_volume = 0
    for shard in shards_metadata:
        shard_volume = 1
        for offset, length, dim in zip(shard.shard_offsets, shard.shard_sizes, tensor_dims):
            shard_volume *= length
            if offset + length > dim:
                raise ValueError(
                    f'Shard offset {offset} and length '
                    f'{length} exceeds tensor dim: {dim} for shard {shard}')
        total_shard_volume += shard_volume

    tensor_volume = 1
    for size in tensor_dims:
        tensor_volume *= size

    if total_shard_volume != tensor_volume:
        # TODO: Can we improve this error message to point out the gaps?
        raise ValueError(
            f'Total volume of shards: {total_shard_volume} '
            f'does not match tensor volume: {tensor_volume}, in other words '
            f'all the individual shards do not cover the entire tensor')

def get_split_size(dim_size, chunks):
    """
    Computes the split size inline with ``torch.chunk``

    Args:
        dim_size(int): Size of the dimension being chunked.
        chunks(int): Number of chunks to create for ``dim_size``.

    Returns:
        An int indicating the split size to use.
    """
    # Ceiling division: each chunk is at most ceil(dim_size / chunks) long.
    return (dim_size + chunks - 1) // chunks

def get_chunked_dim_size(dim_size, split_size, idx):
    """
    Computes the dim size of the chunk for provided ``idx`` given ``dim_size``
    and ``split_size``.

    Args:
        dim_size(int): Size of the dimension being chunked.
        split_size(int): The chunk size for each chunk of ``dim_size``.
        idx(int): The index of chunk whose dim size is being requested.

    Returns:
        An int indicating the dim size of the chunk.
    """
    # The chunk spans [idx*split_size, (idx+1)*split_size), clamped to the
    # dimension's end; chunks past the end have size 0.
    begin = split_size * idx
    end = min(dim_size, split_size * (idx + 1))
    return max(end - begin, 0)

def get_chunk_sharding_params(sharding_dim_size, world_size, spec, rank):
    """
    Generate the start pos and offset length for the current rank for
    chunk sharding.

    Args:
        sharding_dim_size(int): The dimension length which we shard on.
        world_size(int): number of ranks.
        spec (:class:`torch.distributed._shard.sharding_spec.ChunkShardingSpec`):
            sharding spec.
        rank(int): # of cuda process.

    Returns:
        start_pos(int): start position of sharded tensor on the given rank.
        chunk_size(int): chunk size of sharded tensor on the given rank.
    """
    split_size = get_split_size(sharding_dim_size, world_size)
    # Walk the placements, accumulating the offset, until this rank's
    # placement is found; its chunk starts at the accumulated offset.
    offset = 0
    start_pos = offset
    for idx, placement in enumerate(spec.placements):
        chunk_size = get_chunked_dim_size(sharding_dim_size, split_size, idx)
        if placement.rank() == rank:
            start_pos = offset
            break
        offset += chunk_size
    return start_pos, chunk_size
from holoviews.element.comparison import ComparisonTestCase
from pyviz_comms import Comm, JupyterComm


class TestComm(ComparisonTestCase):
    """Unit tests for the base ``Comm`` class."""

    def test_init_comm(self):
        Comm()

    def test_init_comm_id(self):
        comm = Comm(id='Test')
        self.assertEqual(comm.id, 'Test')

    def test_decode(self):
        # The base class decodes messages as a pass-through.
        msg = 'Test'
        self.assertEqual(Comm.decode(msg), msg)

    def test_handle_message_error_reply(self):
        # An on_msg handler that raises should produce an "Error" reply
        # carrying the traceback; send is replaced with the assertion so the
        # reply is inspected rather than transmitted.
        def raise_error(msg=None, metadata=None):
            raise Exception('Test')
        def assert_error(msg=None, metadata=None):
            self.assertEqual(metadata['msg_type'], "Error")
            self.assertTrue(metadata['traceback'].endswith('Exception: Test'))
        comm = Comm(id='Test', on_msg=raise_error)
        comm.send = assert_error
        comm._handle_msg({})

    def test_handle_message_ready_reply(self):
        # Successful handling of a message replies with a "Ready" metadata dict.
        def assert_ready(msg=None, metadata=None):
            self.assertEqual(metadata, {'msg_type': "Ready", 'content': ''})
        comm = Comm(id='Test')
        comm.send = assert_ready
        comm._handle_msg({})

    def test_handle_message_ready_reply_with_comm_id(self):
        # A comm_id present in the incoming message is echoed in the reply.
        def assert_ready(msg=None, metadata=None):
            self.assertEqual(metadata, {'msg_type': "Ready", 'content': '',
                                        'comm_id': 'Testing id'})
        comm = Comm(id='Test')
        comm.send = assert_ready
        comm._handle_msg({'comm_id': 'Testing id'})


class TestJupyterComm(ComparisonTestCase):
    """Unit tests for the Jupyter-specific ``JupyterComm`` subclass."""

    def test_init_comm(self):
        JupyterComm()

    def test_init_comm_id(self):
        comm = JupyterComm(id='Test')
        self.assertEqual(comm.id, 'Test')

    def test_decode(self):
        # JupyterComm unwraps the Jupyter message envelope to the payload.
        msg = {'content': {'data': 'Test'}}
        decoded = JupyterComm.decode(msg)
        self.assertEqual(decoded, 'Test')

    def test_on_msg(self):
        # Exceptions raised by the on_msg handler propagate out of _handle_msg.
        def raise_error(msg):
            if msg == 'Error':
                raise Exception()
        comm = JupyterComm(id='Test', on_msg=raise_error)
        with self.assertRaises(Exception):
            comm._handle_msg({'content': {'data': 'Error'}})
# Functions to load ODELAY generated MATFILES

# Internal Libraries
import click
import csv
from datetime import datetime
import json
import os
import pathlib
import re
import sys
import time

# External Libraries
import cv2
from fast_histogram import histogram1d
import h5py
import scipy.io as sio
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

import tools.imagepl as opl
import tools.odelayplot as odp
from PyQt5.QtWidgets import QApplication, QWidget, QInputDialog, QFileDialog
from PyQt5.QtCore import QDir

def getStrings(str_array):
    '''Decode an iterable of character codes into a single Python string.'''

    return ''.join(chr(c) for c in str_array)

def getHDF5str(str_array):
    '''Join an HDF5 array of byte characters into a single Python string.'''

    return ''.join(c.astype(str) for c in str_array.squeeze())

def _mat_check_keys(d):
    '''
    checks if entries in dictionary are mat-objects. If yes
    todict is called to change them to nested dictionaries
    '''
    for key in d:
        if isinstance(d[key], sio.matlab.mio5_params.mat_struct):
            d[key] = _mattodict(d[key])
    return d

def _mattodict(matobj):
    '''
    A recursive function which constructs from matobjects nested dictionaries
    '''
    d = {}
    for strg in matobj._fieldnames:
        elem = matobj.__dict__[strg]
        if isinstance(elem, sio.matlab.mio5_params.mat_struct):
            # Nested MATLAB struct: recurse.
            d[strg] = _mattodict(elem)

        else:
            d[strg] = elem
    return d

def _mattolist(ndarray):
    '''
    A recursive function which constructs lists from cellarrays
    (which are loaded as numpy ndarrays), recursing into the elements
    if they contain matobjects.
    '''
    elem_list = []
    for sub_elem in ndarray:
        if isinstance(sub_elem, sio.matlab.mio5_params.mat_struct):
            elem_list.append(_mattodict(sub_elem))
        elif isinstance(sub_elem, np.ndarray):
            elem_list.append(_mattolist(sub_elem))
        else:
            elem_list.append(sub_elem)
    return elem_list

def _mat_parseData(group_Obj, hd5_file):
    '''
    Matlab -v7.3 data parser. If the data is a object then it could be a cell array or a nested structure.
    '''
    group_Attrs = dir(group_Obj)
    attrs_Keys = [key for key in group_Obj.attrs.keys()]
    numeric_list = ['double', 'uint8','uint16','logical','int8', 'int16']

    # NOTE(review): if group_Obj is neither an object reference nor carries a
    # recognised MATLAB_class, `data` is never bound and the final return
    # raises NameError — confirm all inputs hit one of these branches.
    if 'dtype' in group_Attrs:
        # First test for Objects if the object reference
        if group_Obj.dtype == 'object':
            data = _matcell_todict(group_Obj,hd5_file)

        elif 'MATLAB_class' in attrs_Keys:
            # Luckily they put the MATLAB_class in attributes so that can define the data type as numeric char or cells
            group_attrs = group_Obj.attrs['MATLAB_class'].decode('utf-8')
            data = []

            if group_attrs in numeric_list:
                data.append(np.array(group_Obj, dtype = group_Obj.dtype).squeeze())

            elif group_attrs == 'char':
                data.append(getStrings(group_Obj))

            elif group_attrs == 'cell':
                del data
                data = {}
                data = _matcell_todict(group_Obj, hd5_file)

    return data

def _mat_hd5ToDict(group_Obj, hd5_file):
    '''
    Import MATLAB hd5f files from MATLAB -v7.3. This will parse the data into nested python dictionaries
    '''
    group_Dict = {}
    for name, elem in group_Obj.items():
        # Iterate through items and check if they are groups or datasets

        if type(elem) is h5py.Group:
            group_Dict[name] = _mat_hd5ToDict(elem, hd5_file)

        elif type(elem) is h5py.Dataset:
            group_Dict[name] = _mat_parseData(elem, hd5_file)

    return group_Dict

def _matcell_todict(group_Obj,hd5_file):
    # Dereference a MATLAB cell array: a 1xN / Nx1 cell becomes a list,
    # anything else is treated as a table whose first column holds keys.
    objShape = group_Obj.shape

    if (objShape[0] == 1 or objShape[1] == 1):
        data= []
        for objRef in group_Obj:
            for ref_elem in objRef:
                if hd5_file[ref_elem].dtype == 'object':
                    data.append(_matcell_todict(hd5_file[ref_elem], hd5_file))
                else:
                    data.append(_mat_parseData(hd5_file[ref_elem], hd5_file))

    else:
        data = {}
        for row in range(objShape[1]):
            name = getStrings(hd5_file[group_Obj[0][row]])
            str_array = []

            for col in range(1,objShape[0]):
                str_array.append(getStrings(hd5_file[group_Obj[col][row]]))

            data[name] = str_array

    return data

def _decomment(csvfile):
    # Generator that yields csv rows with '#' comments and blank lines stripped.
    for row in csvfile:
        raw = row.split('#')[0].strip()
        if raw: yield raw

def _saveDict(dic, hdf5Obj):
    """
    Recursively save a python dictionary to a HDF5 file.
    input: dic - is the dictionary to save
           hdf5Obj - is the current hdf5 group or file handle to save the data to.
    """
    for key, item in dic.items():


        if isinstance(item, (np.ndarray, np.int32, np.uint16, np.int64, np.float64)):

            hdf5Obj.create_dataset(key, data = item)

        elif isinstance(item, (bytes, bool, int, float)):

            hdf5Obj.create_dataset(key, data = np.array(item, dtype=type(item)))


        elif isinstance(item, str):
            # Strings are stored as a column of single ascii characters.
            # breakpoint()
            asciiList = [el.encode("ascii", "ignore") for el in item]
            hdf5Obj.create_dataset(key, (len(asciiList),1),'S10', asciiList)

        elif isinstance(item, dict):
            # if the item is a dictionary, create a new hdf5 group and then recursively continue
            grpObj = hdf5Obj.create_group(key)
            _saveDict(item, grpObj)

        else:
            raise ValueError('Cannot save %s of %s type' %(key,type(item)))

def _getHDF5Data(hdf5Obj):
    """
    Recursively load data from HDF5 file.
    """
    d = {}
    for key, item in hdf5Obj.items():

        if isinstance(item, h5py.Dataset):
            # 'S10' datasets are the per-character string encoding written by _saveDict.
            if item.dtype == 'S10':
                d[key] = getHDF5str(item[()])
            else:

                d[key] = item[()]

        elif isinstance(item, h5py.Group):
            d[key] = _getHDF5Data(item)

    return d

def loadmatlab(filename):
    '''
    loadmat opens MATLAB® formatted binary file (MAT-file)
    loadmat should be called instead of direct sio.loadmat because it recoveres
    python dictionaries from mat files. It calls the function _check_keys to curate all entries
    which are still mat-objects
    '''
    data = sio.loadmat(filename, struct_as_record=False, squeeze_me=True, verify_compressed_data_integrity=False)
    return _mat_check_keys(data)

def math5py(filename):
    '''
    Load HDF5 saved file in ODELAY. Not all files are this type so it tends
    to be problematic
    '''
    f = h5py.File(filename,'r')
    d = _mat_hd5ToDict(f,f)
    f.close()
    return d

def saveDict(fileName, saveDic):
    # Write a nested dictionary to an HDF5 file at fileName.

    with h5py.File(fileName,'w') as hdf5Obj:
        _saveDict(saveDic, hdf5Obj)

def saveROI(fileLocation, roiDict):
    '''Steps to save a file:
    1. Save HFD5 file from entered dictionary
    '''
    filePath = pathlib.Path(fileLocation)
    # Only dictionaries that carry a roiLabel are written; others are ignored.
    if 'roiLabel' in roiDict.keys():

        fileName = filePath /(roiDict['roiLabel'] +'.hdf5')

        with h5py.File(fileName,'w') as hdf5Obj:
            _saveDict(roiDict, hdf5Obj)

    return None

def loadData(filename):
    """
    Load data from HDF5 file into a python Dictionary
    This function will attempt to load MATLAB *.mat files based on thier
    file names.
    There maybe problems with the dictionary returned in that it may need
    to be squeezed.
    """
    d = {}

    if '.mat' in filename.name:

        # Try the MATLAB -v7.3 (HDF5) reader first, fall back to scipy's
        # classic MAT-file reader.
        try:
            d = math5py(filename)
        except:
            d = loadmatlab(filename)

    elif '.hdf5' in filename.name:
        with h5py.File(filename, 'r') as hdf5Obj:
            d = _getHDF5Data(hdf5Obj)

    return d

def summarizeMatLabExp(dataPath, saveSwitch):
    # Build a per-ROI summary dictionary from a MATLAB-era ODELAY experiment
    # directory; optionally save it to '<experiment> summary.hdf5'.
    expDict = {}
    if isinstance(dataPath, str):
        dataPath = pathlib.Path(dataPath)
    else:
        dataPath = dataPath

    checkPath = dataPath / 'ODELAY Well Data'

    expIndexPath = list(dataPath.glob('*Index_ODELAYData.mat'))

    if pathlib.Path.exists(checkPath):
        expData = loadData(expIndexPath[0])

        expName = expData['ExperimentName'][0]
        savePath = dataPath /f'{expName} summary.hdf5'

        fileList = sorted(checkPath.glob('*.mat'))

        expDict = {}
        with click.progressbar(fileList) as fileBar:
            for dataFile in fileBar:

                roiData = loadData(dataFile)

                roi = roiData['WellDataTemp']['WellID']
                # Column layout of the fit-data matrix depends on which growth
                # model was fitted; detect it by key name.
                if 'FitDataGompDT' in list(roiData['Tracks2Temp']['ObjectInfo'].keys()):
                    fitDataKey = 'FitDataGompDT'
                    colList = ['a', 'b', 't-lag', 'dT', 'fssq', 'Tlag', 'Td', 'Tex', 'ATex', 'Aplateau', 'TdFlag', 'TexFlag', 'TVmax', 'Tplat','exitflag','fssq per obs','empty']
                    fitDataCols = {colList[val] : val for val in range(len(colList))}

                elif 'FitDataGomp' in list(roiData['Tracks2Temp']['ObjectInfo'].keys()):
                    fitDataKey = 'FitDataGomp'
                    colList = ['a', 'b', 'vmax', 't-lag', 'fssq', 'Tlag', 'Td', 'Tex', 'ATex', 'Aplateau', 'TdFlag', 'TexFlag', 'TVmax', 'Tplat','exitflag','fssq per obs','empty']
                    fitDataCols = {colList[val] : val for val in range(len(colList))}

                elif 'FitData' in list(roiData['Tracks2Temp']['ObjectInfo'].keys()):
                    fitDataKey = 'FitData'
                    colList = ['a', 'b', 'c', 'd', 'fssq', 'Tlag', 'Td', 'Tex', 'ATex', 'Aplateau', 'TdFlag', 'TexFlag', 'TVmax', 'Tplat','exitflag','fssq per obs','empty']
                    fitDataCols = {colList[val] : val for val in range(len(colList))}



                rc = roiData['Tracks2Temp']['ObjectInfo'][fitDataKey].shape

                # Keep only objects whose fit actually produced a value.
                idVec = np.arange(rc[0], dtype = 'uint32')
                inds = ~np.isnan(roiData['Tracks2Temp']['ObjectInfo'][fitDataKey][:,1])

                roiDict = {}
                try:
                    roiDict['fitDataCols']= fitDataCols
                    roiDict['objectArea'] = roiData['Tracks2Temp']['ObjectInfo']['ObjectArea'][inds,:]
                    roiDict['timePoints'] = roiData['Tracks2Temp']['ObjectInfo']['TimePoints']
                    roiDict['fitData'] = roiData['Tracks2Temp']['ObjectInfo'][fitDataKey][inds,:]
                    roiDict['objID'] = idVec[inds]
                except IndexError:
                    # Fall back to unfiltered arrays when the mask cannot index them.
                    roiDict['fitDataCols']= fitDataCols
                    roiDict['objectArea'] = roiData['Tracks2Temp']['ObjectInfo']['ObjectArea']
                    roiDict['timePoints'] = roiData['Tracks2Temp']['ObjectInfo']['TimePoints']
                    roiDict['fitData'] = roiData['Tracks2Temp']['ObjectInfo'][fitDataKey]
                    roiDict['objID'] = idVec


                roiDict['roi'] = roi
                roiDict['roiInfo'] = {}

                expDict[roi] = roiDict

        # Attach strain annotations from the experiment's layout spreadsheet, if present.
        spotlayoutPath = [*dataPath.glob('*ODELAYExpDisc.xlsx')]
        if len(spotlayoutPath)==1:

            strainID = pd.read_excel(spotlayoutPath[0], sheet_name='Sheet1', header=29, usecols="B:J").set_index('ODELAY Well')
            columnID = 'Strain ID'
            strainInfo1 = 'Plot Name'
            strainInfo2 = 'Misc1'

            for roi in expDict.keys():

                expDict[roi]['roiInfo'][columnID] = f'{strainID.loc[roi][columnID]}-{strainID.loc[roi][strainInfo1]}-{strainID.loc[roi][strainInfo2]}-{roi}'


        if saveSwitch:
            saveDict(savePath, expDict)

    else:
        print('Could Not Find ODELAY Data Folder')

    return expDict

def summarizeExp(dataPath, saveSwitch):
    # Python-era counterpart of summarizeMatLabExp: summarize per-ROI hdf5 files.
    if isinstance(dataPath, str):
        dataPath = pathlib.Path(dataPath)
    else:
        dataPath = dataPath

    indexList = [k for k in dataPath.glob('*Index_ODELAYData.*')]
    if len(indexList)==1:
        expIndexPath = dataPath / indexList[0]
    else:
        print('Could not find the correct index file or there were more than one in the diretory')

    expData = loadData(expIndexPath)

    expName = expData['experiment_name']
    savePath = dataPath /f'{expName} summary.hdf5'

    roiList = list(expData['roiFiles'].keys())
    roiList.sort()

    # generate nested dictionary of FitData, ObjectArea, and TimePoints
    expDict = {}
    with click.progressbar(roiList) as roiBar:
        for roi in roiBar:
            roiPath = dataPath / 'ODELAY Roi Data' / f'{roi}.hdf5'
            if roiPath.exists():
                roiData = loadData(roiPath)
                rc = roiData['fitData'].shape
                idVec = 
np.arange(rc[0], dtype = 'uint32')NEWLINE inds = roiData['fitData'][:,0]>0NEWLINE roiDict = {}NEWLINE roiDict['fitDataCols']= roiData['fitDataCols']NEWLINE roiDict['fitData'] = roiData['fitData'][inds,:]NEWLINE roiDict['objectArea'] = roiData['objectArea'][inds,:]NEWLINE roiDict['timePoints'] = roiData['timePoints']NEWLINE roiDict['objID'] = idVec[inds]NEWLINE roiDict['roi'] = roiNEWLINE roiDict['roiInfo'] = {}NEWLINENEWLINE expDict[roi] = roiDictNEWLINENEWLINENEWLINE spotlayoutPath = [*dataPath.glob('*Spot-Layout.xlsx')]NEWLINE NEWLINE if len(spotlayoutPath)==1:NEWLINE columnID = 'Strain ID'NEWLINE strainID = pd.read_excel(spotlayoutPath[0], sheet_name='Sheet1', header=0, usecols="A:L").set_index('ROI')NEWLINE columnList = ['Strain ID','Strain Info 1','Strain Info 2' ]NEWLINE for roi in expDict.keys():NEWLINENEWLINE strainInfo = ' '.join(str(strainID.loc[roi][colID]) for colID in columnList) + ' ' + roiNEWLINE expDict[roi]['roiInfo'][columnID] = strainInfoNEWLINE NEWLINE if saveSwitch:NEWLINE saveDict(savePath, expDict)NEWLINENEWLINE return expDictNEWLINENEWLINEdef exportcsv(dataPath, roiList=None):NEWLINE ''' Export csv files of object area and fit data. 
All data is exported including non-fit data.'''NEWLINE # TODO: Add ability to filter data and export data label vectors.NEWLINE # TODO: Add ability to look for summary data to shorten export timNEWLINENEWLINE dateString = datetime.today().strftime("%Y-%m-%d")NEWLINE directoryName = f'{dateString}_csvOut'NEWLINENEWLINE saveLocation = dataPath / directoryNameNEWLINE if not saveLocation.exists():NEWLINE saveLocation.mkdir()NEWLINE NEWLINE summaryList = list(dataPath.glob('*summary.hdf5'))NEWLINE NEWLINE if len(summaryList)==1:NEWLINE expDict = loadData(summaryList[0])NEWLINE print('summary loaded')NEWLINENEWLINE for roi in expDict.keys():NEWLINE roiData = expDict[roi]NEWLINE NEWLINE rc = roiData['fitData'].shapeNEWLINE idVec = np.arange(rc[0], dtype = 'uint32')NEWLINE inds = roiData['fitData'][:,0]>0NEWLINENEWLINE timePoints = roiData['timePoints']NEWLINE objectArea = roiData['objectArea'][inds,:]NEWLINE NEWLINE fitDataCols = roiData['fitDataCols']NEWLINE fitDataHeader = [key for key, value in sorted(fitDataCols.items(), key=lambda item: item[1])]NEWLINENEWLINE fitData = roiData['fitData'][inds,:len(fitDataHeader)]NEWLINENEWLINE fitDataFrame = pd.DataFrame(fitData, columns = fitDataHeader)NEWLINE objAreaFrame = pd.DataFrame(objectArea, columns = timePoints)NEWLINE fitDataFrame['object ID'] = roiData['objID']NEWLINE NEWLINE NEWLINE objArea_csv_Path = saveLocation / f'{roi}-objectArea.csv'NEWLINE fitData_csv_Path = saveLocation / f'{roi}-FitData.csv'NEWLINENEWLINE fitDataFrame.to_csv(fitData_csv_Path, index = None, header=True)NEWLINE objAreaFrame.to_csv(objArea_csv_Path, index = None, header=True)NEWLINE NEWLINE else:NEWLINE for roi in roiList:NEWLINE roiPath = dataPath / 'ODELAY Roi Data' / f'{roi}.hdf5'NEWLINE if roiPath.exists():NEWLINE roiData = loadData(roiPath)NEWLINENEWLINE rc = roiData['fitData'].shapeNEWLINE idVec = np.arange(rc[0], dtype = 'uint32')NEWLINE inds = roiData['fitData'][:,0]>0NEWLINENEWLINE timePoints = roiData['timePoints']NEWLINE objectArea = 
roiData['objectArea'][inds,:]NEWLINE fitData = roiData['fitData'][inds,0:15]NEWLINE fitDataCols = roiData['fitDataCols']NEWLINE fitDataHeader = [key for key, value in sorted(fitDataCols.items(), key=lambda item: item[1])]NEWLINE NEWLINE fitDataFrame = pd.DataFrame(fitData, columns = fitDataHeader)NEWLINE objAreaFrame = pd.DataFrame(objectArea, columns = timePoints)NEWLINE fitDataFrame['object ID'] = idVec[inds]NEWLINE objAreaFrame['object ID'] = idVec[inds]NEWLINENEWLINE objArea_csv_Path = saveLocation / f'{roi}-objectArea.csv'NEWLINE fitData_csv_Path = saveLocation / f'{roi}-FitData.csv'NEWLINENEWLINE fitDataFrame.to_csv(fitData_csv_Path, index = None, header=True)NEWLINE objAreaFrame.to_csv(objArea_csv_Path, index = None, header=True)NEWLINE else:NEWLINE print(f"{roi} did not process as its data file doesn't exist")NEWLINENEWLINE return NoneNEWLINENEWLINEdef exportavi( imagepath, datapath, roi, objID = None):NEWLINENEWLINE '''Write XVID encoded *.avi movie for timecourse images.'''NEWLINENEWLINENEWLINE dataPath = pathlib.Path(datapath)NEWLINE imagePath = pathlib.Path(imagepath)NEWLINENEWLINE directoryName = 'ODELAY Roi AVI'NEWLINENEWLINE saveLocation = dataPath / directoryNameNEWLINE if not saveLocation.exists():NEWLINE pathlib.Path.mkdir(saveLocation)NEWLINE # '''Write an AVI file that shows the ROI over time'''NEWLINE # TODO: figure out way to zoom in on colony area, add time code, and scale barNEWLINENEWLINE indexList = [k for k in dataPath.glob('*Index_ODELAYData.*')]NEWLINE if len(indexList)==1:NEWLINE expIndexPath = dataPath / indexList[0]NEWLINE else:NEWLINE print('Could not find the correct index file or there were more than one in the diretory')NEWLINENEWLINE expIndex = loadData(expIndexPath)NEWLINENEWLINE roiPath = dataPath / 'ODELAY Roi Data' / f'{roi}.hdf5'NEWLINE roiData = loadData(roiPath)NEWLINENEWLINE imageList = list(expIndex['roiFiles'][roi].keys())NEWLINE imageList.sort(key=lambda var:[int(x) if x.isdigit() else x for x in 
re.findall(r'[^0-9]|[0-9]+', var)])NEWLINE numImage = len(imageList)NEWLINE # Determin size of stitched imageNEWLINENEWLINE stitchDim = np.zeros((numImage,2), dtype = 'float')NEWLINE magnification = expIndex['magnification']NEWLINE pixSize = expIndex['pixSize']NEWLINE background = expIndex['backgroundImage']NEWLINENEWLINE for ai in roiData['stitchMeta'].keys():NEWLINE stitchDim[int(ai),:] = roiData['stitchMeta'][ai]['stitchDim']NEWLINENEWLINE vidDim = np.median(stitchDim,0).astype('uint')NEWLINE NEWLINE fps = 10.0NEWLINENEWLINE vidFileName = str(saveLocation / f'{roi}.avi')NEWLINENEWLINE fcc = cv2.VideoWriter_fourcc(*'XVID')NEWLINENEWLINE aviOut = cv2.VideoWriter(vidFileName,fcc, fps, (vidDim[1],vidDim[0]),1)NEWLINE imInd = 0NEWLINE NEWLINE NEWLINE for im in imageList:NEWLINENEWLINE # load imageNEWLINE # rectify umageDimsNEWLINE # Adjust Contrast to uint8NEWLINE # write frame in aviNEWLINE roiKey = f'{imInd:03d}'NEWLINE imPix = roiData['stitchMeta'][roiKey]['imPix']NEWLINE imageFilePath = imagePath / roi / imNEWLINE rawImage = opl.assembleImage(imageFilePath, pixSize, magnification, background, imPix)NEWLINENEWLINE # Generate histogram of the loaded imageNEWLINE imageHist = histogram1d(rawImage['Bf'].ravel(),2**16,[0,2**16],weights = None).astype('float')NEWLINE # Calculate the cumulative probability ignoring zero values NEWLINE cumHist = np.cumsum(imageHist)NEWLINE cumProb = (cumHist-cumHist[0])/(cumHist[2**16-1]-cumHist[0])NEWLINE # set low and high values ot normalize image contrast. 
NEWLINE loval = np.argmax(cumProb>0.00001)NEWLINE hival = np.argmax(cumProb>=0.9995)NEWLINENEWLINE rc = np.min((stitchDim[imInd,:],vidDim),axis=0).astype('int')NEWLINE adjIm = np.zeros((vidDim[0],vidDim[1]), dtype = 'float')NEWLINE adjIm[:rc[0],:rc[1]] = (rawImage['Bf'][:rc[0],:rc[1]].astype('float') - loval.astype('float'))/(hival.astype('float') - loval.astype('float'))*254NEWLINE lim = np.iinfo('uint8')NEWLINE scIm = np.clip(adjIm, lim.min, lim.max) NEWLINENEWLINE vidIm = np.stack([scIm, scIm, scIm], axis=2).astype('uint8')NEWLINENEWLINE aviOut.write(vidIm)NEWLINE imInd +=1NEWLINE NEWLINE NEWLINE aviOut.release()NEWLINE NEWLINENEWLINE NEWLINE return NoneNEWLINENEWLINEdef exporttiffs(imagepath, datapath, roi, objID = None):NEWLINE '''NEWLINE Data from Experiment Dictionary or ObjectNEWLINE '''NEWLINE if isinstance(imagepath, str):NEWLINE imagePath = pathlib.Path(imagepath)NEWLINE else:NEWLINE imagePath = imagepathNEWLINENEWLINE if isinstance(datapath, str):NEWLINE dataPath = pathlib.Path(datapath)NEWLINE else:NEWLINE dataPath = datapathNEWLINENEWLINE indexList = [*dataPath.glob('*Index_ODELAYData.*')]NEWLINENEWLINE if len(indexList)==1:NEWLINE expIndexPath = dataPath / indexList[0]NEWLINE else:NEWLINE print('Could not find the correct index file or there were more than one in the diretory')NEWLINENEWLINE expData = loadData(expIndexPath)NEWLINE #####################################NEWLINE # Load Dictionary variables There has to be a way to dynamically add theseNEWLINE #####################################NEWLINE background = expData['backgroundImage']NEWLINE numTimePoints = expData['numTimePoints'] # number of timepontsNEWLINE pixSize = expData['pixSize']NEWLINE magnification = expData['magnification']NEWLINE roiFiles = expData['roiFiles']NEWLINE odelayDataPath = dataPath / 'ODELAY Roi Data'NEWLINENEWLINE # Else this will crashNEWLINE NEWLINE roiList = [*roiFiles]NEWLINENEWLINE tiffPath = dataPath / 'ODELAY Tiff Images'NEWLINE if not tiffPath.exists():NEWLINE 
tiffPath.mkdir()NEWLINE NEWLINE if roi in roiList:NEWLINE roiPath = imagePath / roiNEWLINE fileList = os.listdir(roiPath)NEWLINE imageFileList = [fileName for fileName in fileList if '.mat' in fileName]NEWLINE # Understand this gem of a regular expression sort.NEWLINE imageFileList.sort(key=lambda var:[int(x) if x.isdigit() else x for x in re.findall(r'[^0-9]|[0-9]+', var)])NEWLINE numImages = len(imageFileList)NEWLINE NEWLINE tiffRoiPath = tiffPath / roiNEWLINE if not tiffRoiPath.exists():NEWLINE tiffRoiPath.mkdir()NEWLINENEWLINE # Start Processing Data HereNEWLINE for aI in range(numImages):NEWLINENEWLINE imageFilePath = roiPath / imageFileList[aI]NEWLINE NEWLINE anImage = opl.stitchImage(imageFilePath, pixSize, magnification, background)NEWLINE NEWLINE for imlbl in anImage['imageLabels'].keys():NEWLINE saveFilePath = tiffRoiPath / f'{roi}_{imlbl}_{aI+1:03d}.tiff'NEWLINE uint8Image = odp.scaleImage(anImage[imlbl])NEWLINE retVal = cv2.imwrite(str(saveFilePath), uint8Image)NEWLINE NEWLINE NEWLINE return NoneNEWLINENEWLINEdef readExpDisc(dataPath):NEWLINENEWLINE '''Reads formatted excell spreadsheet and returns a dataframe with the experiment orgainsation.'''NEWLINENEWLINE spotlayoutPath = [*dataPath.glob('*Spot-Layout.xlsx')]NEWLINE if len(spotlayoutPath)==1:NEWLINE strainID = pd.read_excel(spotlayoutPath, sheet_name='Sheet1', header=0, usecols="A:L").set_index('ROI')NEWLINENEWLINE return strainIDNEWLINENEWLINEdef setdatadir(loc_data_dir):NEWLINE '''Set the directory where processed ODELAY data is loaded/saved'''NEWLINE configfilePath = pathlib.Path( pathlib.Path.home() / '.odelayconfig' )NEWLINENEWLINE with open(configfilePath, 'r') as fileIn:NEWLINE odelayConfig = json.load(fileIn)NEWLINENEWLINE localDataPath = pathlib.Path(loc_data_dir)NEWLINENEWLINE if localDataPath.exists:NEWLINE resolvedPath = localDataPath.resolve()NEWLINE LocalDataPathstr = str(resolvedPath)NEWLINENEWLINE HPCDataPath = LocalDataPathstr.replace('\\','/').replace('//helens','/')NEWLINENEWLINE 
odelayConfig['LocalDataDir'] = loc_data_dirNEWLINE odelayConfig['HPCDataDir'] = HPCDataPath NEWLINE print(f'Data Directory path from local computer is: {loc_data_dir}')NEWLINE print(f'Data Directory path from HPC computer is: {HPCDataPath}')NEWLINE NEWLINENEWLINE odelayConfig['PathCheck'] = FalseNEWLINENEWLINE with open(configfilePath, 'w') as fileOut:NEWLINE json.dump(odelayConfig, fileOut)NEWLINENEWLINE return resolvedPathNEWLINENEWLINEdef setimagedir(loc_image_dir):NEWLINE '''Set the directory where the experiment's images are located'''NEWLINENEWLINE configfilePath = pathlib.Path( pathlib.Path.home() / '.odelayconfig' )NEWLINENEWLINE with open(configfilePath, 'r') as fileIn:NEWLINE odelayConfig = json.load(fileIn)NEWLINENEWLINE localImagePath = pathlib.Path(loc_image_dir)NEWLINE NEWLINE if localImagePath.exists():NEWLINE NEWLINE resolvedPath = pathlib.Path(loc_image_dir).resolve()NEWLINE LocalImagePathstr = str(resolvedPath)NEWLINE NEWLINE HPCImagePath = LocalImagePathstr.replace('\\','/').replace('//pplhpc1ces','/gpfs/scratch')NEWLINENEWLINE odelayConfig['LocalImageDir'] = loc_image_dirNEWLINE odelayConfig['HPCImageDir'] = HPCImagePath NEWLINE print(f'Image Directory path from local computer is: {loc_image_dir}')NEWLINE print(f'Image Directory path from HPC computer is: {HPCImagePath}')NEWLINENEWLINENEWLINE odelayConfig['PathCheck'] = FalseNEWLINE NEWLINE with open(configfilePath, 'w') as fileOut:NEWLINE json.dump(odelayConfig, fileOut)NEWLINENEWLINENEWLINE return resolvedPathNEWLINENEWLINEdef loadConfig():NEWLINE configfilePath = pathlib.Path( pathlib.Path.home() / '.odelayconfig' )NEWLINENEWLINE with open(configfilePath, 'r') as fileIn:NEWLINE odelayConfig = json.load(fileIn)NEWLINENEWLINE return odelayConfigNEWLINENEWLINEdef saveConfig(odelayConfig):NEWLINENEWLINE configfilePath = pathlib.Path( pathlib.Path.home() / '.odelayconfig' )NEWLINE odelayConfig['PathCheck'] = FalseNEWLINENEWLINE with open(configfilePath, 'w') as fileOut:NEWLINE 
json.dump(odelayConfig, fileOut)NEWLINENEWLINE return odelayConfigNEWLINENEWLINEdef readMMConfigFile(filePath):NEWLINENEWLINE configFiltPath = pathlib.Path(filePath)NEWLINENEWLINE configList = []NEWLINENEWLINE configDict = {NEWLINE 'Device':{},NEWLINE 'Parent':{},NEWLINE 'Label':{},NEWLINE 'Group':{}NEWLINE }NEWLINENEWLINE with open(configFilePath) as csvfile:NEWLINE reader = csv.reader(_decomment(csvfile))NEWLINE for row in reader:NEWLINE configList.append(row)NEWLINENEWLINE for row in configList:NEWLINE NEWLINE if row[0] == 'Device':NEWLINE configDict['Device'].update({row[1]: [row[2],row[3]]})NEWLINENEWLINE elif row[0] == 'Parent':NEWLINE configDict['Parent'].update({row[1]:row[2]})NEWLINENEWLINE elif row[0] == 'Label':NEWLINE if row[1] in testDict[row[0]].keys():NEWLINE configDict[row[0]][row[1]].update({row[2]:row[3]})NEWLINENEWLINE else:NEWLINE configDict[row[0]].update({row[1]:{row[2]:row[3]}})NEWLINENEWLINE elif row[0] == 'Group':NEWLINE configDict['Group'].update({row[1]:{row[2]: row[3]}})NEWLINENEWLINE return configDictNEWLINENEWLINE# Print iterations progressNEWLINEdef printProgressBar (iteration, total, prefix = '', suffix = '', decimals = 1, length = 100, fill = '█', printEnd = "\r"):NEWLINE """NEWLINE Call in a loop to create terminal progress barNEWLINE @params:NEWLINE iteration - Required : current iteration (Int)NEWLINE total - Required : total iterations (Int)NEWLINE prefix - Optional : prefix string (Str)NEWLINE suffix - Optional : suffix string (Str)NEWLINE decimals - Optional : positive number of decimals in percent complete (Int)NEWLINE length - Optional : character length of bar (Int)NEWLINE fill - Optional : bar fill character (Str)NEWLINE printEnd - Optional : end character (e.g. "\r", "\r\n") (Str)NEWLINE """NEWLINE percent = ("{0:." 
+ str(decimals) + "f}").format(100 * (iteration / float(total)))NEWLINE filledLength = int(length * iteration // total)NEWLINE bar = fill * filledLength + '-' * (length - filledLength)NEWLINE print('\r%s |%s| %s%% %s' % (prefix, bar, percent, suffix), end = printEnd)NEWLINE # Print New Line on CompleteNEWLINE if iteration == total: NEWLINE print()NEWLINENEWLINEdef openFileDialog():NEWLINENEWLINE app = QApplication(sys.argv)NEWLINE options = QFileDialog.Options()NEWLINE fileName, _ = QFileDialog.getOpenFileName(None,"Select ODELAY Data Set", "","ODELAYExpDisc (*Index_ODELAYData.mat);; Mat-Files (*.mat)", options=options)NEWLINE app.quit()NEWLINE return fileName
import osNEWLINEimport sysNEWLINENEWLINEINTERP = os.path.expanduser("~/venv/bin/python3")NEWLINEif sys.executable != INTERP:NEWLINE os.execl(INTERP, INTERP, *sys.argv)NEWLINENEWLINEsys.path.append(os.getcwd())NEWLINEfrom blokdata.app import app as applicationNEWLINENEWLINE# For running on the server with passenger etcNEWLINEif __name__ == "__main__":NEWLINE application.run()NEWLINE
__copyright__ = "Copyright (c) 2020 Jina AI Limited. All rights reserved."NEWLINE__license__ = "Apache-2.0"NEWLINENEWLINEimport numpy as npNEWLINENEWLINEfrom . import BaseExecutableDriverNEWLINENEWLINEif False:NEWLINE from ..types.sets import DocumentSetNEWLINENEWLINENEWLINEclass BaseIndexDriver(BaseExecutableDriver):NEWLINE """Drivers inherited from this Driver will bind :meth:`add` by default """NEWLINENEWLINE def __init__(self, executor: str = None, method: str = 'add', *args, **kwargs):NEWLINE super().__init__(executor, method, *args, **kwargs)NEWLINENEWLINENEWLINEclass VectorIndexDriver(BaseIndexDriver):NEWLINE """Extract chunk-level embeddings and add it to the executorNEWLINE """NEWLINENEWLINE def _apply_all(self, docs: 'DocumentSet', *args, **kwargs) -> None:NEWLINE embed_vecs, docs_pts, bad_docs = docs.all_embeddingsNEWLINENEWLINE if bad_docs:NEWLINE self.pea.logger.warning(f'these bad docs can not be added: {bad_docs}')NEWLINENEWLINE if docs_pts:NEWLINE self.exec_fn(np.array([hash(doc.id) for doc in docs_pts]), np.stack(embed_vecs))NEWLINENEWLINENEWLINEclass KVIndexDriver(BaseIndexDriver):NEWLINE """Serialize the documents/chunks in the request to key-value JSON pairs and write it using the executorNEWLINE """NEWLINENEWLINE def _apply_all(self, docs: 'DocumentSet', *args, **kwargs) -> None:NEWLINE keys = [hash(doc.id) for doc in docs]NEWLINE values = [doc.SerializeToString() for doc in docs]NEWLINE self.exec_fn(keys, values)NEWLINE
#!/usr/bin/env pythonNEWLINENEWLINEfrom __future__ import print_functionNEWLINENEWLINEfrom builtins import rangeNEWLINEimport osNEWLINEimport reNEWLINEimport sysNEWLINEimport globNEWLINEimport jsonNEWLINEimport mathNEWLINEimport bisectNEWLINEimport randomNEWLINEimport signalNEWLINEif sys.version_info[0]>2:NEWLINE import _pickle as cPickleNEWLINEelse:NEWLINE import cPickleNEWLINEimport difflibNEWLINEimport argparseNEWLINEimport functoolsNEWLINEimport itertoolsNEWLINEimport subprocessNEWLINEimport collectionsNEWLINEimport multiprocessingNEWLINEimport FWCore.PythonUtilities.LumiList as LumiListNEWLINEimport Utilities.General.cmssw_das_client as cmssw_das_clientNEWLINEimport Alignment.MillePedeAlignmentAlgorithm.mpslib.tools as mps_toolsNEWLINENEWLINENEWLINE################################################################################NEWLINEdef main(argv = None):NEWLINE """NEWLINE Main routine. Not called, if this module is loaded via `import`.NEWLINENEWLINE Arguments:NEWLINE - `argv`: Command line arguments passed to the script.NEWLINE """NEWLINENEWLINE if argv == None:NEWLINE argv = sys.argv[1:]NEWLINENEWLINE file_list_creator = FileListCreator(argv)NEWLINE file_list_creator.create()NEWLINENEWLINENEWLINE################################################################################NEWLINEclass FileListCreator(object):NEWLINE """Create file lists for alignment and validation for a given dataset.NEWLINE """NEWLINENEWLINE def __init__(self, argv):NEWLINE """Constructor taking the command line arguments.NEWLINENEWLINE Arguments:NEWLINE - `args`: command line argumentsNEWLINE """NEWLINENEWLINE self._first_dataset_ini = TrueNEWLINE self._parser = self._define_parser()NEWLINE self._args = self._parser.parse_args(argv)NEWLINENEWLINE if not mps_tools.check_proxy():NEWLINE print_msg(NEWLINE "Please create proxy via 'voms-proxy-init -voms cms -rfc'.")NEWLINE sys.exit(1)NEWLINENEWLINE self._dataset_regex = re.compile(r"^/([^/]+)/([^/]+)/([^/]+)$")NEWLINE 
self._validate_input()NEWLINENEWLINE if self._args.test_mode:NEWLINE import Configuration.PyReleaseValidation.relval_steps as rvsNEWLINE import Configuration.PyReleaseValidation.relval_production as rvpNEWLINE self._args.datasets = [rvs.steps[rvp.workflows[1000][1][0]]["INPUT"].dataSet]NEWLINE self._validate_input() # ensure that this change is validNEWLINENEWLINE self._datasets = sorted([datasetNEWLINE for pattern in self._args.datasetsNEWLINE for dataset in get_datasets(pattern)NEWLINE if re.search(self._args.dataset_filter, dataset)])NEWLINE if len(self._datasets) == 0:NEWLINE print_msg("Found no dataset matching the pattern(s):")NEWLINE for d in self._args.datasets: print_msg("\t"+d)NEWLINE sys.exit(1)NEWLINENEWLINE self._formatted_dataset = merge_strings(NEWLINE [re.sub(self._dataset_regex, r"\1_\2_\3", dataset)NEWLINE for dataset in self._datasets])NEWLINE self._output_dir = os.path.join(self._args.output_dir,NEWLINE self._formatted_dataset)NEWLINE self._output_dir = os.path.abspath(self._output_dir)NEWLINE self._cache = _DasCache(self._output_dir)NEWLINE self._prepare_iov_datastructures()NEWLINE self._prepare_run_datastructures()NEWLINENEWLINE try:NEWLINE os.makedirs(self._output_dir)NEWLINE except OSError as e:NEWLINE if e.args == (17, "File exists"):NEWLINE if self._args.force:NEWLINE pass # do nothing, just clear the existing outputNEWLINE elif self._args.use_cache:NEWLINE self._cache.load() # load cache before clearing the outputNEWLINE else:NEWLINE print_msg("Directory '{}' already exists from previous runs"NEWLINE " of the script. Use '--use-cache' if you want to"NEWLINE " use the cached DAS-query results Or use "NEWLINE "'--force' to remove it."NEWLINE .format(self._output_dir))NEWLINE sys.exit(1)NEWLINE files = glob.glob(os.path.join(self._output_dir, "*"))NEWLINE for f in files: os.remove(f)NEWLINE else:NEWLINE raiseNEWLINENEWLINENEWLINE def create(self):NEWLINE """Creates file list. 
To be called by user of the class."""NEWLINENEWLINE self._request_dataset_information()NEWLINE self._create_file_lists()NEWLINE self._print_eventcounts()NEWLINE self._write_file_lists()NEWLINENEWLINENEWLINE _event_count_log = "event_count_info.log"NEWLINENEWLINENEWLINE def _define_parser(self):NEWLINE """Definition of command line argument parser."""NEWLINENEWLINE parser = argparse.ArgumentParser(NEWLINE description = "Create file lists for alignment",NEWLINE epilog = ("The tool will create a directory containing all file "NEWLINE "lists and a log file with all relevant event counts "NEWLINE "('{}').".format(FileListCreator._event_count_log)))NEWLINE parser.add_argument("-i", "--input", dest = "datasets", required = True,NEWLINE metavar = "DATASET", action = "append",NEWLINE help = ("CMS dataset name; supports wildcards; "NEWLINE "use multiple times for multiple datasets"))NEWLINE parser.add_argument("--dataset-filter", default = "",NEWLINE help = "regex to match within in the datasets matched,"NEWLINE "in case the wildcard isn't flexible enough")NEWLINE parser.add_argument("-j", "--json", dest = "json", metavar = "PATH",NEWLINE help = "path to JSON file (optional)")NEWLINE parser.add_argument("-f", "--fraction", dest = "fraction",NEWLINE type = float, default = 1,NEWLINE help = "max. 
fraction of files used for alignment")NEWLINE parser.add_argument("--iov", dest = "iovs", metavar = "RUN", type = int,NEWLINE action = "append", default = [],NEWLINE help = ("define IOV by specifying first run; for "NEWLINE "multiple IOVs use this option multiple "NEWLINE "times; files from runs before the lowest "NEWLINE "IOV are discarded (default: 1)"))NEWLINE parser.add_argument("--miniiov", dest="miniiovs", metavar="RUN", type=int,NEWLINE action="append", default=[],NEWLINE help=("in addition to the standard IOVs, break up hippy jobs "NEWLINE "at these points, so that jobs from before and after "NEWLINE "these runs are not in the same job"))NEWLINE parser.add_argument("-r", "--random", action = "store_true",NEWLINE default = False, help = "select files randomly")NEWLINE parser.add_argument("-n", "--events-for-alignment", "--maxevents",NEWLINE dest = "events", type = int, metavar = "NUMBER",NEWLINE help = ("number of events needed for alignment; the"NEWLINE " remaining events in the dataset are used "NEWLINE "for validation; if n<=0, all events are "NEWLINE "used for validation"))NEWLINE parser.add_argument("--all-events", action = "store_true",NEWLINE help = "Use all events for alignment")NEWLINE parser.add_argument("--tracks-for-alignment", dest = "tracks",NEWLINE type = int, metavar = "NUMBER",NEWLINE help = "number of tracks needed for alignment")NEWLINE parser.add_argument("--track-rate", dest = "rate", type = float,NEWLINE metavar = "NUMBER",NEWLINE help = "number of tracks per event")NEWLINE parser.add_argument("--run-by-run", dest = "run_by_run",NEWLINE action = "store_true", default = False,NEWLINE help = "create validation file list for each run")NEWLINE parser.add_argument("--minimum-events-in-iov",NEWLINE dest = "minimum_events_in_iov", metavar = "NUMBER",NEWLINE type = int, default = 100000,NEWLINE help = ("minimum number of events for alignment per"NEWLINE " IOV; this option has a higher priority "NEWLINE "than '-f/--fraction' "NEWLINE "(default: 
%(default)s)"))NEWLINE parser.add_argument("--minimum-events-validation",NEWLINE dest = "minimum_events_validation",NEWLINE metavar = "NUMBER", type = int, default = 1,NEWLINE help = ("minimum number of events for validation; "NEWLINE "applies to IOVs; in case of --run-by-run "NEWLINE "it applies to runs runs "NEWLINE "(default: %(default)s)"))NEWLINE parser.add_argument("--use-cache", dest = "use_cache",NEWLINE action = "store_true", default = False,NEWLINE help = "use DAS-query results of previous run")NEWLINE parser.add_argument("-o", "--output-dir", dest = "output_dir",NEWLINE metavar = "PATH", default = os.getcwd(),NEWLINE help = "output base directory (default: %(default)s)")NEWLINE parser.add_argument("--create-ini", dest = "create_ini",NEWLINE action = "store_true", default = False,NEWLINE help = ("create dataset ini file based on the "NEWLINE "created file lists"))NEWLINE parser.add_argument("--force", action = "store_true", default = False,NEWLINE help = ("remove output directory from previous "NEWLINE "runs, if existing"))NEWLINE parser.add_argument("--hippy-events-per-job", type = int, default = 1,NEWLINE help = ("approximate number of events in each job for HipPy"))NEWLINE parser.add_argument("--test-mode", dest = "test_mode",NEWLINE action = "store_true", default = False,NEWLINE help = argparse.SUPPRESS) # hidden optionNEWLINE return parserNEWLINENEWLINENEWLINE def _validate_input(self):NEWLINE """Validate command line arguments."""NEWLINENEWLINE if self._args.events is None:NEWLINE if self._args.all_events:NEWLINE self._args.events = float("inf")NEWLINE print_msg("Using all tracks for alignment")NEWLINE elif (self._args.tracks is None) and (self._args.rate is None):NEWLINE msg = ("either -n/--events-for-alignment, --all-events, or both of "NEWLINE "--tracks-for-alignment and --track-rate are required")NEWLINE self._parser.error(msg)NEWLINE elif (((self._args.tracks is not None) and (self._args.rate is None)) orNEWLINE ((self._args.rate is not 
None)and (self._args.tracks is None))):NEWLINE msg = ("--tracks-for-alignment and --track-rate must be used "NEWLINE "together")NEWLINE self._parser.error(msg)NEWLINE else:NEWLINE self._args.events = int(math.ceil(self._args.tracks /NEWLINE self._args.rate))NEWLINE print_msg("Requested {0:d} tracks with {1:.2f} tracks/event "NEWLINE "-> {2:d} events for alignment."NEWLINE .format(self._args.tracks, self._args.rate,NEWLINE self._args.events))NEWLINE else:NEWLINE if (self._args.tracks is not None) or (self._args.rate is not None) or self._args.all_events:NEWLINE msg = ("-n/--events-for-alignment must not be used with "NEWLINE "--tracks-for-alignment, --track-rate, or --all-events")NEWLINE self._parser.error(msg)NEWLINE print_msg("Requested {0:d} events for alignment."NEWLINE .format(self._args.events))NEWLINENEWLINE for dataset in self._args.datasets:NEWLINE if not re.match(self._dataset_regex, dataset):NEWLINE print_msg("Dataset pattern '"+dataset+"' is not in CMS format.")NEWLINE sys.exit(1)NEWLINENEWLINE nonzero_events_per_iov = (self._args.minimum_events_in_iov > 0)NEWLINE if nonzero_events_per_iov and self._args.fraction <= 0:NEWLINE print_msg("Setting minimum number of events per IOV for alignment "NEWLINE "to 0 because a non-positive fraction of alignment events"NEWLINE " is chosen: {}".format(self._args.fraction))NEWLINE nonzero_events_per_iov = FalseNEWLINE self._args.minimum_events_in_iov = 0NEWLINE if nonzero_events_per_iov and self._args.events <= 0:NEWLINE print_msg("Setting minimum number of events per IOV for alignment "NEWLINE "to 0 because a non-positive number of alignment events"NEWLINE " is chosen: {}".format(self._args.events))NEWLINE nonzero_events_per_iov = FalseNEWLINE self._args.minimum_events_in_iov = 0NEWLINENEWLINENEWLINE def _prepare_iov_datastructures(self):NEWLINE """Create the needed objects for IOV handling."""NEWLINENEWLINE self._iovs = sorted(set(self._args.iovs))NEWLINE if len(self._iovs) == 0: self._iovs.append(1)NEWLINE 
self._iov_info_alignment = {iov: {"events": 0, "files": []}NEWLINE for iov in self._iovs}NEWLINE self._iov_info_validation = {iov: {"events": 0, "files": []}NEWLINE for iov in self._iovs}NEWLINENEWLINE self._miniiovs = sorted(set(self._iovs) | set(self._args.miniiovs))NEWLINENEWLINENEWLINE def _get_iovs(self, runs, useminiiovs=False):NEWLINE """NEWLINE Return the IOV start for `run`. Returns 'None' if the run is before anyNEWLINE defined IOV.NEWLINENEWLINE Arguments:NEWLINE - `runs`: run numbersNEWLINE """NEWLINENEWLINE iovlist = self._miniiovs if useminiiovs else self._iovsNEWLINENEWLINE iovs = []NEWLINE for run in runs:NEWLINE iov_index = bisect.bisect(iovlist, run)NEWLINE if iov_index > 0: iovs.append(iovlist[iov_index-1])NEWLINE return iovsNEWLINENEWLINENEWLINE def _prepare_run_datastructures(self):NEWLINE """Create the needed objects for run-by-run validation file lists."""NEWLINENEWLINE self._run_info = {}NEWLINENEWLINENEWLINE def _add_file_info(self, container, keys, fileinfo):NEWLINE """Add file with `file_name` to `container` using `key`.NEWLINENEWLINE Arguments:NEWLINE - `container`: dictionary holding information on files and event countsNEWLINE - `keys`: keys to which the info should be added; will be created if notNEWLINE existingNEWLINE - `file_name`: name of a dataset fileNEWLINE """NEWLINENEWLINE for key in keys:NEWLINE if key not in container:NEWLINE container[key] = {"events": 0,NEWLINE "files": []}NEWLINE container[key]["events"] += fileinfo.nevents / len(keys)NEWLINE if fileinfo not in container[key]["files"]:NEWLINE container[key]["files"].append(fileinfo)NEWLINENEWLINENEWLINE def _remove_file_info(self, container, keys, fileinfo):NEWLINE """Remove file with `file_name` to `container` using `key`.NEWLINENEWLINE Arguments:NEWLINE - `container`: dictionary holding information on files and event countsNEWLINE - `keys`: keys from which the info should be removedNEWLINE - `file_name`: name of a dataset fileNEWLINE - `event_count`: number of events 
    def _request_dataset_information(self):
        """Retrieve general dataset information and create file list.

        Fills `self._events_in_dataset`, `self._files`, `self._file_info`,
        `self._max_run`, and `self.rereco`, either from the on-disk DAS cache
        or by querying DAS in a worker pool, and writes fresh results back to
        the cache.
        """

        if not self._cache.empty:
            print_msg("Using cached information.")
            (self._events_in_dataset,
             self._files,
             self._file_info,
             self._max_run) = self._cache.get()
            # a file spanning more than one run indicates a re-reco dataset
            self.rereco = any(len(fileinfo.runs)>1 for fileinfo in self._file_info)
            # NOTE(review): this branch shuffles only self._files, while the
            # non-cached path below shuffles self._file_info and rebuilds
            # self._files from it — the two paths look inconsistent; confirm
            # which list the later selection actually draws from.
            if self._args.random: random.shuffle(self._files)
            return

        # workaround to deal with KeyboardInterrupts in the worker processes:
        # - ignore interrupt signals in workers (see initializer)
        # - use a timeout of size sys.maxsize to avoid a bug in multiprocessing
        number_of_processes = multiprocessing.cpu_count() - 1
        number_of_processes = (number_of_processes
                               if number_of_processes > 0
                               else 1)
        # NOTE(review): the pool is never close()d/join()ed; workers are only
        # reaped at interpreter exit — confirm this is acceptable here.
        pool = multiprocessing.Pool(
            processes = number_of_processes,
            initializer = lambda: signal.signal(signal.SIGINT, signal.SIG_IGN))

        print_msg("Requesting information for the following dataset(s):")
        for d in self._datasets: print_msg("\t"+d)
        print_msg("This may take a while...")

        # total event count over all requested datasets
        result = pool.map_async(get_events_per_dataset, self._datasets).get(3600)
        self._events_in_dataset = sum(result)

        # highest run number over all requested datasets
        result = pool.map_async(get_max_run, self._datasets).get(3600)
        self._max_run = max(result)

        # per-file (dataset, name, nevents) tuples, then augmented with runs
        result = sum(pool.map_async(get_file_info, self._datasets).get(3600), [])
        files = pool.map_async(_make_file_info, result).get(3600)
        self._file_info = sorted(fileinfo for fileinfo in files)

        self.rereco = any(len(fileinfo.runs)>1 for fileinfo in self._file_info)

        if self._args.test_mode:
            self._file_info = self._file_info[-200:] # take only last chunk of files
        self._files = [fileinfo.name for fileinfo in self._file_info]

        # write information to cache (before shuffling, so the cache stays
        # deterministic)
        self._cache.set(self._events_in_dataset, self._files, self._file_info,
                        self._max_run)
        self._cache.dump()
        if self._args.random:
            random.shuffle(self._file_info)
            self._files = [fileinfo.name for fileinfo in self._file_info]
    def _create_file_lists(self):
        """Create file lists for alignment and validation.

        Walks `self._file_info` in order, assigning files to the alignment
        list until either the requested event count or the configured file
        fraction is reached; every later file (with a known IOV) goes to the
        validation list.  Afterwards tops up under-populated alignment IOVs
        and builds the HipPy job partition.
        """

        # collect files for alignment until minimal requirements are fulfilled
        self._files_alignment = []
        self._files_validation = []
        self._events_for_alignment = 0
        self._events_for_validation = 0

        # index limit implementing the --fraction cut (0 disables alignment
        # entirely when no events are requested)
        max_range = (0
                     if self._args.events <= 0
                     else int(math.ceil(len(self._files)*self._args.fraction)))
        use_for_alignment = True
        for i, fileinfo in enumerate(self._file_info):
            # once either stop condition fires, all remaining files are
            # validation candidates (the flag is never reset)
            enough_events = self._events_for_alignment >= self._args.events
            fraction_exceeded = i >= max_range
            if enough_events or fraction_exceeded: use_for_alignment = False

            dataset, f, number_of_events, runs = fileinfo

            iovs = self._get_iovs(runs)
            if use_for_alignment:
                if iovs:
                    self._events_for_alignment += number_of_events
                    self._files_alignment.append(fileinfo)
                    self._add_file_info(self._iov_info_alignment, iovs, fileinfo)
                else:
                    max_range += 1  # not used -> discard in fraction calculation
            else:
                if iovs:
                    self._events_for_validation += number_of_events
                    self._files_validation.append(fileinfo)
                    self._add_file_info(self._iov_info_validation, iovs, fileinfo)
                    if self._args.run_by_run:
                        self._add_file_info(self._run_info, runs, fileinfo)

        self._fulfill_iov_eventcount()

        self._split_hippy_jobs()


    def _fulfill_iov_eventcount(self):
        """
        Try to fulfill the requirement on the minimum number of events per IOV
        in the alignment file list by picking files from the validation list.

        Moved files are accounted for in both directions (alignment counters
        increased, validation counters decreased).
        """

        for iov in self._iovs:
            if self._iov_info_alignment[iov]["events"] >= self._args.minimum_events_in_iov: continue
            # iterate over a copy: the original list is mutated in the loop
            for fileinfo in self._files_validation[:]:
                dataset, f, number_of_events, runs = fileinfo
                iovs = self._get_iovs(runs)
                if iov in iovs:
                    self._files_alignment.append(fileinfo)
                    self._events_for_alignment += number_of_events
                    self._add_file_info(self._iov_info_alignment, iovs, fileinfo)

                    self._events_for_validation -= number_of_events
                    self._remove_file_info(self._iov_info_validation, iovs, fileinfo)
                    if self._args.run_by_run:
                        self._remove_file_info(self._run_info, runs, fileinfo)
                    self._files_validation.remove(fileinfo)

                    if (self._iov_info_alignment[iov]["events"]
                            >= self._args.minimum_events_in_iov):
                        break # break the file loop if already enough events
    def _print_eventcounts(self):
        """Print the event counts per file list and per IOV.

        Output goes both to stdout and to the event-count log file in the
        output directory.  Event counts are approximate for re-reco datasets
        because multi-run files share their events across IOVs.
        """

        log = os.path.join(self._output_dir, FileListCreator._event_count_log)

        print_msg("Using {0:d} events for alignment ({1:.2f}%)."
                  .format(self._events_for_alignment,
                          100.0*
                          self._events_for_alignment/self._events_in_dataset),
                  log_file = log)
        for iov in sorted(self._iov_info_alignment):
            print_msg(("Approximate events" if self.rereco else "Events") + " for alignment in IOV since {0:f}: {1:f}"
                      .format(iov, self._iov_info_alignment[iov]["events"]),
                      log_file = log)

        print_msg("Using {0:d} events for validation ({1:.2f}%)."
                  .format(self._events_for_validation,
                          100.0*
                          self._events_for_validation/self._events_in_dataset),
                  log_file = log)

        for iov in sorted(self._iov_info_validation):
            msg = ("Approximate events" if self.rereco else "Events") + " for validation in IOV since {0:f}: {1:f}".format(
                iov, self._iov_info_validation[iov]["events"])
            if (self._iov_info_validation[iov]["events"]
                < self._args.minimum_events_validation):
                msg += " (not enough events -> no dataset file will be created)"
            print_msg(msg, log_file = log)

        # `self._run_info` is only populated in --run-by-run mode, so this
        # loop is a no-op otherwise
        for run in sorted(self._run_info):
            msg = ("Approximate events" if self.rereco else "Events") + " for validation in run {0:f}: {1:f}".format(
                run, self._run_info[run]["events"])
            if (self._run_info[run]["events"]
                < self._args.minimum_events_validation):
                msg += " (not enough events -> no dataset file will be created)"
            print_msg(msg, log_file = log)

        unused_events = (self._events_in_dataset
                         - self._events_for_validation
                         - self._events_for_alignment)
        # chained comparison: "unused_events > 0 AND 0 != total events",
        # i.e. only report when something was left over and the dataset is
        # not empty
        if unused_events > 0 != self._events_in_dataset:
            print_msg("Unused events: {0:d} ({1:.2f}%)"
                      .format(unused_events,
                              100.0*unused_events/self._events_in_dataset),
                      log_file = log)
If aNEWLINE global JSON is provided, the resulting file is the intersection of theNEWLINE file created here and the global one.NEWLINE Returns the name of the created JSON file.NEWLINENEWLINE Arguments:NEWLINE - `name`: name of the creted JSON fileNEWLINE - `first`: first run covered by the JSON fileNEWLINE - `last`: last run covered by the JSON fileNEWLINENEWLINE """NEWLINENEWLINE if last is None: last = self._max_runNEWLINE name += "_JSON.txt"NEWLINE print_msg("Creating JSON file: "+name)NEWLINENEWLINE json_file = LumiList.LumiList(runs = range(first, last+1))NEWLINE if self._args.json:NEWLINE global_json = LumiList.LumiList(filename = self._args.json)NEWLINE json_file = json_file & global_jsonNEWLINE json_file.writeJSON(os.path.join(self._output_dir, name))NEWLINENEWLINE return nameNEWLINENEWLINENEWLINE def _get_track_collection(self, edm_file):NEWLINE """Extract track collection from given `edm_file`.NEWLINENEWLINE Arguments:NEWLINE - `edm_file`: CMSSW dataset fileNEWLINE """NEWLINENEWLINE # use global redirector to allow also files not yet at your site:NEWLINE cmd = ["edmDumpEventContent", r"root://cms-xrd-global.cern.ch/"+edm_file]NEWLINE try:NEWLINE event_content = subprocess.check_output(cmd).split("\n")NEWLINE except subprocess.CalledProcessError as e:NEWLINE splitted = edm_file.split("/")NEWLINE try:NEWLINE alcareco = splitted[splitted.index("ALCARECO")+1].split("-")[0]NEWLINE alcareco = alcareco.replace("TkAlCosmics0T", "TkAlCosmicsCTF0T")NEWLINE alcareco = "ALCARECO" + alcarecoNEWLINE print_msg("\tDetermined track collection as '{}'.".format(alcareco))NEWLINE return alcarecoNEWLINE except ValueError:NEWLINE if "RECO" in splitted:NEWLINE print_msg("\tDetermined track collection as 'generalTracks'.")NEWLINE return "generalTracks"NEWLINE else:NEWLINE print_msg("\tCould not determine track collection "NEWLINE "automatically.")NEWLINE print_msg("\tPlease replace 'DUMMY_TRACK_COLLECTION' with "NEWLINE "the correct value.")NEWLINE return 
"DUMMY_TRACK_COLLECTION"NEWLINENEWLINE track_collections = []NEWLINE for line in event_content:NEWLINE splitted = line.split()NEWLINE if len(splitted) > 0 and splitted[0] == r"vector<reco::Track>":NEWLINE track_collections.append(splitted[1].strip().strip('"'))NEWLINE if len(track_collections) == 0:NEWLINE print_msg("No track collection found in file '{}'.".format(edm_file))NEWLINE sys.exit(1)NEWLINE elif len(track_collections) == 1:NEWLINE print_msg("\tDetermined track collection as "NEWLINE "'{}'.".format(track_collections[0]))NEWLINE return track_collections[0]NEWLINE else:NEWLINE alcareco_tracks = filter(lambda x: x.startswith("ALCARECO"),NEWLINE track_collections)NEWLINE if len(alcareco_tracks) == 0 and "generalTracks" in track_collections:NEWLINE print_msg("\tDetermined track collection as 'generalTracks'.")NEWLINE return "generalTracks"NEWLINE elif len(alcareco_tracks) == 1:NEWLINE print_msg("\tDetermined track collection as "NEWLINE "'{}'.".format(alcareco_tracks[0]))NEWLINE return alcareco_tracks[0]NEWLINE print_msg("\tCould not unambiguously determine track collection in "NEWLINE "file '{}':".format(edm_file))NEWLINE print_msg("\tPlease replace 'DUMMY_TRACK_COLLECTION' with "NEWLINE "the correct value from the following list.")NEWLINE for collection in track_collections:NEWLINE print_msg("\t - "+collection)NEWLINE return "DUMMY_TRACK_COLLECTION"NEWLINENEWLINENEWLINE def _write_file_lists(self):NEWLINE """Write file lists to disk."""NEWLINENEWLINE self._create_dataset_txt(self._formatted_dataset, self._files_alignment)NEWLINE self._create_hippy_txt(self._formatted_dataset, sum(self._hippy_jobs.values(), []))NEWLINE self._create_dataset_cff(NEWLINE "_".join(["Alignment", self._formatted_dataset]),NEWLINE self._files_alignment)NEWLINENEWLINE self._create_dataset_cff(NEWLINE "_".join(["Validation", self._formatted_dataset]),NEWLINE self._files_validation)NEWLINENEWLINENEWLINE if self._args.create_ini:NEWLINE dataset_ini_general = "[general]\n"NEWLINE 
dataset_ini_general += "datasetdir = {}\n".format(self._output_dir)NEWLINE dataset_ini_general += ("json = {}\n\n".format(self._args.json)NEWLINE if self._args.jsonNEWLINE else "\n")NEWLINENEWLINE ini_path = self._formatted_dataset + ".ini"NEWLINE print_msg("Creating dataset ini file: " + ini_path)NEWLINE ini_path = os.path.join(self._output_dir, ini_path)NEWLINENEWLINE collection = self._get_track_collection(self._files[0])NEWLINENEWLINE with open(ini_path, "w") as f:NEWLINE f.write(dataset_ini_general)NEWLINE f.write(self._create_dataset_ini_section(NEWLINE self._formatted_dataset, collection))NEWLINENEWLINE iov_wise_ini = dataset_ini_generalNEWLINENEWLINE for i,iov in enumerate(sorted(self._iovs)):NEWLINE iov_str = "since{0:d}".format(iov)NEWLINE iov_str = "_".join([self._formatted_dataset, iov_str])NEWLINENEWLINE if self.rereco:NEWLINE if i == len(self._iovs) - 1:NEWLINE last = NoneNEWLINE else:NEWLINE last = sorted(self._iovs)[i+1] - 1NEWLINE local_json = self._create_json_file(iov_str, iov, last)NEWLINE else:NEWLINE local_json = NoneNEWLINENEWLINE if self._args.create_ini:NEWLINE iov_wise_ini += self._create_dataset_ini_section(iov_str,NEWLINE collection,NEWLINE local_json)NEWLINENEWLINE self._create_dataset_txt(iov_str,NEWLINE self._iov_info_alignment[iov]["files"])NEWLINE self._create_hippy_txt(iov_str, sum((self._hippy_jobs[dataset,iov] for dataset in self._datasets), []))NEWLINE self._create_dataset_cff(NEWLINE "_".join(["Alignment", iov_str]),NEWLINE self._iov_info_alignment[iov]["files"],NEWLINE json_file=local_json)NEWLINENEWLINE if (self._iov_info_validation[iov]["events"]NEWLINE < self._args.minimum_events_validation):NEWLINE continueNEWLINE self._create_dataset_cff(NEWLINE "_".join(["Validation", iov_str]),NEWLINE self._iov_info_validation[iov]["files"],NEWLINE json_file=local_json)NEWLINENEWLINE if self._args.create_ini and iov_wise_ini != dataset_ini_general:NEWLINE ini_path = self._formatted_dataset + "_IOVs.ini"NEWLINE print_msg("Creating 
dataset ini file: " + ini_path)NEWLINE ini_path = os.path.join(self._output_dir, ini_path)NEWLINE with open(ini_path, "w") as f: f.write(iov_wise_ini)NEWLINENEWLINE for run in sorted(self._run_info):NEWLINE if args.rereco: continue #need to implement more jsonsNEWLINE if (self._run_info[run]["events"]NEWLINE < self._args.minimum_events_validation):NEWLINE continueNEWLINE self._create_dataset_cff(NEWLINE "_".join(["Validation", self._formatted_dataset, str(run)]),NEWLINE self._run_info[run]["files"])NEWLINENEWLINENEWLINE def _create_dataset_txt(self, name, file_list):NEWLINE """Write alignment file list to disk.NEWLINENEWLINE Arguments:NEWLINE - `name`: name of the file listNEWLINE - `file_list`: list of files to write to `name`NEWLINE """NEWLINENEWLINE name += ".txt"NEWLINE print_msg("Creating dataset file list: "+name)NEWLINE with open(os.path.join(self._output_dir, name), "w") as f:NEWLINE f.write("\n".join(fileinfo.name for fileinfo in file_list))NEWLINENEWLINENEWLINE def _create_hippy_txt(self, name, job_list):NEWLINE name += "_hippy.txt"NEWLINE print_msg("Creating dataset file list for HipPy: "+name)NEWLINE with open(os.path.join(self._output_dir, name), "w") as f:NEWLINE f.write("\n".join(",".join("'"+fileinfo.name+"'" for fileinfo in job) for job in job_list)+"\n")NEWLINENEWLINENEWLINE def _create_dataset_cff(self, name, file_list, json_file = None):NEWLINE """NEWLINE Create configuration fragment to define a dataset.NEWLINENEWLINE Arguments:NEWLINE - `name`: name of the configuration fragmentNEWLINE - `file_list`: list of files to write to `name`NEWLINE - `json_file`: JSON file to be used for this dataset (optional)NEWLINE """NEWLINENEWLINE if json_file is None: json_file = self._args.json # might still be NoneNEWLINE if json_file is not None:NEWLINE json_file = os.path.join(self._output_dir, json_file)NEWLINENEWLINE name = "_".join(["Dataset",name, "cff.py"])NEWLINE print_msg("Creating dataset configuration fragment: "+name)NEWLINENEWLINE file_list_str = 
""NEWLINE for sub_list in get_chunks(file_list, 255):NEWLINE file_list_str += ("readFiles.extend([\n'"+NEWLINE "',\n'".join(fileinfo.name for fileinfo in sub_list)+NEWLINE "'\n])\n")NEWLINENEWLINE fragment = FileListCreator._dataset_template.format(NEWLINE lumi_def = ("import FWCore.PythonUtilities.LumiList as LumiList\n\n"NEWLINE "lumiSecs = cms.untracked.VLuminosityBlockRange()\n"NEWLINE "goodLumiSecs = LumiList.LumiList(filename = "NEWLINE "'{0:s}').getCMSSWString().split(',')"NEWLINE .format(json_file)NEWLINE if json_file else ""),NEWLINE lumi_arg = ("lumisToProcess = lumiSecs,\n "NEWLINE if json_file else ""),NEWLINE lumi_extend = "lumiSecs.extend(goodLumiSecs)" if json_file else "",NEWLINE files = file_list_str)NEWLINENEWLINE with open(os.path.join(self._output_dir, name), "w") as f:NEWLINE f.write(fragment)NEWLINENEWLINENEWLINE _dataset_template = """\NEWLINEimport FWCore.ParameterSet.Config as cmsNEWLINE{lumi_def:s}NEWLINEreadFiles = cms.untracked.vstring()NEWLINEsource = cms.Source("PoolSource",NEWLINE {lumi_arg:s}fileNames = readFiles)NEWLINE{files:s}{lumi_extend:s}NEWLINEmaxEvents = cms.untracked.PSet(input = cms.untracked.int32(-1))NEWLINE"""NEWLINENEWLINENEWLINEclass _DasCache(object):NEWLINE """Helper class to cache information from DAS requests."""NEWLINENEWLINE def __init__(self, file_list_id):NEWLINE """Constructor of the cache.NEWLINENEWLINE Arguments:NEWLINE - `file_list_id`: ID of the cached file listsNEWLINE """NEWLINENEWLINE self._file_list_id = file_list_idNEWLINE self._cache_file_name = os.path.join(file_list_id, ".das_cache.pkl")NEWLINE self.reset()NEWLINENEWLINENEWLINE def reset(self):NEWLINE """Reset the cache contents and the 'empty' flag."""NEWLINENEWLINE self._empty = TrueNEWLINE self._events_in_dataset = 0NEWLINE self._files = []NEWLINE self._file_info = []NEWLINE self._max_run = NoneNEWLINENEWLINENEWLINE def set(self, total_events, file_list, file_info, max_run):NEWLINE """Set the content of the cache.NEWLINENEWLINE Arguments:NEWLINE 
- `total_events`: total number of events in datasetNEWLINE - `file_list`: list of files in datasetNEWLINE - `file_info`: dictionary with numbers of events per fileNEWLINE - `max_run`: highest run number contained in the datasetNEWLINE """NEWLINENEWLINE self._events_in_dataset = total_eventsNEWLINE self._files = file_listNEWLINE self._file_info = file_infoNEWLINE self._max_run = max_runNEWLINE self._empty = FalseNEWLINENEWLINENEWLINE def get(self):NEWLINE """NEWLINE Get the content of the cache as tuple:NEWLINE result = (total number of events in dataset,NEWLINE list of files in dataset,NEWLINE dictionary with numbers of events and runs per file)NEWLINE """NEWLINENEWLINE return self._events_in_dataset, self._files, self._file_info, self._max_runNEWLINENEWLINENEWLINE def load(self):NEWLINE """Loads the cached contents."""NEWLINENEWLINE if not self.empty:NEWLINE print_msg("Overriding file information with cached information.")NEWLINE try:NEWLINE with open(self._cache_file_name, "rb") as f:NEWLINE tmp_dict = cPickle.load(f)NEWLINE self.__dict__.update(tmp_dict)NEWLINE except IOError as e:NEWLINE if e.args == (2, "No such file or directory"):NEWLINE msg = "Failed to load cache for '{}'.".format(self._file_list_id)NEWLINE if not self.empty:NEWLINE msg += " Keeping the previous file information."NEWLINE print_msg(msg)NEWLINE else:NEWLINE raiseNEWLINENEWLINENEWLINE def dump(self):NEWLINE """Dumps the contents to the cache file."""NEWLINENEWLINE if self.empty:NEWLINE print_msg("Cache is empty. 
def das_client(query, check_key = None):
    """
    Submit `query` to DAS client and handle possible errors.
    Further treatment of the output might be necessary.

    Retries up to 5 times on known transient failures (EBADF from the DAS
    client, truncated JSON responses, or a response missing `check_key`);
    exits via sys.exit(1) if all attempts fail.

    Arguments:
    - `query`: DAS query
    - `check_key`: optional key to be checked for; retriggers query if needed
    """

    # start from a sentinel so that failing every attempt produces the clean
    # error path below instead of a NameError on an unbound `das_data`
    das_data = {"status": "error",
                "reason": "DAS client returned no usable data."}
    for i in range(5): # maximum of 5 tries
        try:
            das_data = cmssw_das_client.get_data(query, limit = 0)
        except IOError as e:
            if e.errno == 14: #https://stackoverflow.com/q/36397853/5228524
                continue
            raise       # unexpected IOError: do not swallow it silently
        except ValueError as e:
            if str(e) == "No JSON object could be decoded":
                continue
            raise       # unexpected ValueError: do not swallow it silently

        if das_data["status"] == "ok":
            if das_data["nresults"] == 0 or check_key is None:
                break

            # verify that the requested key actually carries data
            result_count = 0
            for d in find_key(das_data["data"], [check_key]):
                result_count += len(d)
            if result_count == 0:
                das_data["status"] = "error"
                das_data["reason"] = ("DAS did not return required data.")
                continue
            else:
                break

    if das_data["status"] == "error":
        print_msg("DAS query '{}' failed 5 times. "
                  "The last time for the following reason:".format(query))
        print(das_data["reason"])
        sys.exit(1)
    return das_data["data"]
def get_max_run(dataset_name):
    """Retrieve the maximum run number in `dataset_name`.

    Arguments:
    - `dataset_name`: name of the dataset
    """

    data = das_client("run dataset={0:s} system=dbs3".format(dataset_name))
    return max(entry["run"][0]["run_number"] for entry in data)


def get_files(dataset_name):
    """Retrieve list of files in `dataset_name`.

    Arguments:
    - `dataset_name`: name of the dataset
    """

    query = ("file dataset={0:s} system=dbs3 detail=True | "
             "grep file.name, file.nevents > 0").format(dataset_name)
    return [find_key(entry["file"], ["name"])
            for entry in das_client(query, "file")]


def get_datasets(dataset_pattern):
    """Retrieve list of dataset matching `dataset_pattern`.

    Arguments:
    - `dataset_pattern`: pattern of dataset names
    """

    query = ("dataset dataset={0:s} system=dbs3 detail=True"
             "| grep dataset.name").format(dataset_pattern)
    matches = {find_key(entry["dataset"], ["name"])
               for entry in das_client(query, "dataset")}
    return sorted(matches)


def get_events_per_dataset(dataset_name):
    """Retrieve the number of a events in `dataset_name`.

    Arguments:
    - `dataset_name`: name of a dataset
    """

    return _get_events("dataset", dataset_name)
def _get_events(entity, name):
    """Retrieve the number of events from `entity` called `name`.

    Arguments:
    - `entity`: type of entity ("dataset" or "file")
    - `name`: name of entity
    """

    data = das_client("{0:s}={1:s} system=dbs3 detail=True | grep {0:s}.nevents"
                      .format(entity, name), entity)
    return int(find_key(data, [entity, "nevents"]))


def _get_properties(name, entity, properties, filters = None, sub_entity = None,
                    aggregators = None):
    """Retrieve `properties` from `entity` called `name`.

    Builds a DAS query of the form
    '<sub_entity> <entity>=<name> ... | grep <props+filters>[ | <aggregators>]'
    and returns one list of property values per result record.

    Arguments:
    - `name`: name of entity
    - `entity`: type of entity
    - `properties`: list of property names
    - `filters`: list of filters on properties
    - `sub_entity`: type of entity from which to extract the properties;
                    defaults to `entity`
    - `aggregators`: additional aggregators/filters to amend to query
    """

    if sub_entity is None: sub_entity = entity
    if filters is None: filters = []
    # only the first whitespace-separated token of each property name is used
    # to build the grep keys
    props = ["{0:s}.{1:s}".format(sub_entity,prop.split()[0])
             for prop in properties]
    conditions = ["{0:s}.{1:s}".format(sub_entity, filt)
                  for filt in filters]
    add_ons = "" if aggregators is None else " | "+" | ".join(aggregators)

    data = das_client("{0:s} {1:s}={2:s} system=dbs3 detail=True | grep {3:s}{4:s}"
                      .format(sub_entity, entity, name,
                              ", ".join(props+conditions), add_ons), sub_entity)
    return [[find_key(f[sub_entity], [prop]) for prop in properties] for f in data]


def get_file_info(dataset):
    """Return (dataset, file name, nevents) tuples for all non-empty files
    of `dataset`."""
    result = _get_properties(name=dataset,
                             properties = ["name", "nevents"],
                             filters = ["nevents > 0"],
                             entity = "dataset",
                             sub_entity = "file")
    return [(dataset, name, nevents) for name, nevents in result]
# lightweight record for one dataset file: (dataset, file name, event count,
# run numbers); sortable, which the file-list creation relies on
FileInfo = collections.namedtuple("FileInfo", "dataset name nevents runs")

def _make_file_info(dataset_name_nevents):
    """Augment a (dataset, name, nevents) tuple with its run numbers."""
    return FileInfo(*dataset_name_nevents,
                    runs=get_runs(dataset_name_nevents[1]))

def get_chunks(long_list, chunk_size):
    """
    Generates list of sub-lists of `long_list` with a maximum size of
    `chunk_size`.

    Arguments:
    - `long_list`: original list
    - `chunk_size`: maximum size of created sub-lists
    """

    start = 0
    while start < len(long_list):
        yield long_list[start:start+chunk_size]
        start += chunk_size


def merge_strings(strings):
    """Merge strings in `strings` into a common string.

    Reduces the list pairwise: the common string of two inputs keeps every
    matching block once and interleaves the non-matching pieces of both.

    Arguments:
    - `strings`: list of strings
    """

    if type(strings) == str:
        return strings
    if len(strings) == 0:
        return ""
    if len(strings) == 1:
        return strings[0]
    if len(strings) == 2:
        first, second = strings[0], strings[1]
    else:
        # fold the tail first, then merge the last element into it
        first = merge_strings(strings[:-1])
        second = strings[-1]

    pieces = []
    prev_i = prev_j = prev_n = 0
    matcher = difflib.SequenceMatcher(None, first, second)
    for i, j, n in matcher.get_matching_blocks():
        # unmatched gaps from both strings, then the shared block once
        pieces.append(first[prev_i+prev_n:i])
        pieces.append(second[prev_j+prev_n:j])
        pieces.append(first[i:i+n])
        prev_i, prev_j, prev_n = i, j, n

    return "".join(pieces)


################################################################################
if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        pass
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***

from . import _utilities
import typing
# Export this package's modules as members:
from .app import *
from .app_role_attachment import *
from .app_rule import *
from .auth_server import *
from .get_user import *
from .get_users import *
from .oidc_app import *
from .privilege import *
from .provider import *
from .role import *
from .saml_app import *
from .smart_hook import *
from .smart_hook_environment_variable import *
from .user import *
from .user_mapping import *
from ._inputs import *
from . import outputs

# Make subpackages available:
if typing.TYPE_CHECKING:
    import pulumi_onelogin.config as config
else:
    # lazy import avoids paying the import cost of subpackages at runtime
    config = _utilities.lazy_import('pulumi_onelogin.config')

# Register this package's resource types with the Pulumi engine so that
# resource tokens (e.g. "onelogin:index/app:App") can be resolved back to
# the Python classes during deserialization.
_utilities.register(
    resource_modules="""
[
 {
  "pkg": "onelogin",
  "mod": "index/app",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/app:App": "App"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/appRoleAttachment",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/appRoleAttachment:AppRoleAttachment": "AppRoleAttachment"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/appRule",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/appRule:AppRule": "AppRule"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/authServer",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/authServer:AuthServer": "AuthServer"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/oidcApp",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/oidcApp:OidcApp": "OidcApp"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/privilege",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/privilege:Privilege": "Privilege"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/role",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/role:Role": "Role"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/samlApp",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/samlApp:SamlApp": "SamlApp"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/smartHook",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/smartHook:SmartHook": "SmartHook"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/smartHookEnvironmentVariable",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/smartHookEnvironmentVariable:SmartHookEnvironmentVariable": "SmartHookEnvironmentVariable"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/user",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/user:User": "User"
  }
 },
 {
  "pkg": "onelogin",
  "mod": "index/userMapping",
  "fqn": "pulumi_onelogin",
  "classes": {
   "onelogin:index/userMapping:UserMapping": "UserMapping"
  }
 }
]
""",
    resource_packages="""
[
 {
  "pkg": "onelogin",
  "token": "pulumi:providers:onelogin",
  "fqn": "pulumi_onelogin",
  "class": "Provider"
 }
]
"""
)
# coding: utf-8NEWLINENEWLINE"""NEWLINE KubernetesNEWLINENEWLINE No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)NEWLINENEWLINE OpenAPI spec version: v1.13.5NEWLINE NEWLINE Generated by: https://github.com/swagger-api/swagger-codegen.gitNEWLINE"""NEWLINENEWLINENEWLINEfrom __future__ import absolute_importNEWLINENEWLINEimport osNEWLINEimport sysNEWLINEimport unittestNEWLINENEWLINEimport kubernetes.clientNEWLINEfrom kubernetes.client.rest import ApiExceptionNEWLINEfrom kubernetes.client.models.v1_rolling_update_stateful_set_strategy import V1RollingUpdateStatefulSetStrategyNEWLINENEWLINENEWLINEclass TestV1RollingUpdateStatefulSetStrategy(unittest.TestCase):NEWLINE """ V1RollingUpdateStatefulSetStrategy unit test stubs """NEWLINENEWLINE def setUp(self):NEWLINE passNEWLINENEWLINE def tearDown(self):NEWLINE passNEWLINENEWLINE def testV1RollingUpdateStatefulSetStrategy(self):NEWLINE """NEWLINE Test V1RollingUpdateStatefulSetStrategyNEWLINE """NEWLINE # FIXME: construct object with mandatory attributes with example valuesNEWLINE #model = kubernetes.client.models.v1_rolling_update_stateful_set_strategy.V1RollingUpdateStatefulSetStrategy()NEWLINE passNEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE unittest.main()NEWLINE
import numpy as np
import matplotlib.pyplot as plt

# plt.style.use("dark_background")

from matplotlib.patches import Polygon
from matplotlib.collections import PatchCollection
from matplotlib.collections import PolyCollection

from mpl_toolkits.axes_grid1 import AxesGrid

# import mpl_toolkits

# = mpl_toolkits.legacy_colorbar

# legacy_colorbar.rcParam = False

from math import acos
from math import degrees
from math import fabs

from sklearn.cluster import KMeans
from sklearn.cluster import SpectralClustering

from directional_clustering.geometry import clockwise
from directional_clustering.geometry import laplacian_smoothed
from directional_clustering.geometry import cosine_similarity
from directional_clustering.geometry import contour_polygons

from directional_clustering.clusters import kmeans_fit
from directional_clustering.clusters import init_kmeans_farthest
from directional_clustering.clusters import _kmeans

from directional_clustering.plotters import ClusterPlotter
from directional_clustering.plotters import rgb_colors
from directional_clustering.plotters import plot_kmeans_vectors

from compas.datastructures import Mesh
from compas.datastructures import mesh_unify_cycles

from compas.geometry import dot_vectors
from compas.geometry import scale_vector
from compas.geometry import normalize_vector
from compas.geometry import length_vector
from compas.geometry import angle_vectors
from compas.geometry import length_vector_sqrd
from compas.geometry import subtract_vectors

from compas.utilities import geometric_key

# =============================================================================
# Constants
# =============================================================================

# face-attribute tags available in the input JSON meshes
tags = [
    "n_1",
    "n_2",
    "m_1",
    "m_2",
    "ps_1_top",
    "ps_1_bot",
    "ps_1_mid",
    "ps_2_top",
    "ps_2_bot",
    "ps_2_mid",
    "custom_1",
    "custom_2"
    ]


# output directory for the exported text files (libigl MIQ tutorial)
THERE = "/Users/arpj/code/libraries/libigl/tutorial/508_ARP_MIQ/"

# HERE = "../data/json_files/two_point_wall" # leonhardt
# HERE = "../data/json_files/wall_with_hole" # schlaich
# HERE = "../data/json_files/cantilever_wall_3_1" # rozvany?
# HERE = "../data/json_files/square_wall_cantilever" # michell
# HERE = "../data/json_files/square_wall_down" # schlaich
# HERE = "../data/json_files/perimeter_supported_slab"

HERE = "../data/json_files/four_point_slab"
# HERE = "../data/json_files/four_point_slab_k_7"
# HERE = "../data/json_files/perimeter_supported_slab_k_5"
# HERE = "../data/json_files/perimeter_supported_slab"
# NOTE(review): this second assignment overrides the "four_point_slab" value
# above — the vault dataset is the one actually processed.
HERE = "../data/json_files/perimeter_supported_vault_z500mm_k_3" #vault

tag = "n_1_k"
tag_2 = "n_2_k"

# tag = "m_1_k"
# tag_2 = "m_2_k"

x_lim = -10.0 # faces stay if x coord of their centroid is larger than x_lim
y_lim = -10.0 # faces stay if y coord of their centroid is larger than y_lim

# =============================================================================
# Import mesh
# =============================================================================

name = HERE.split("/").pop()
mesh = Mesh.from_json(HERE + ".json")
mesh_unify_cycles(mesh)

# ==========================================================================
# Store subset attributes
# ==========================================================================

# map face-centroid geometric keys to the original face keys so attributes
# can be transferred onto the rebuilt mesh below
centroids = {}
vectors = {}
vectors_2 = {}

for fkey in mesh.faces():
    centroids[geometric_key(mesh.face_centroid(fkey))] = fkey
    vectors[fkey] = mesh.face_attribute(fkey, tag)
    vectors_2[fkey] = mesh.face_attribute(fkey, tag_2)

# ==========================================================================
# Rebuild mesh - necessary to match ordering of collection.set(array)! 
# ==========================================================================

# keep only faces whose centroid lies inside the (x_lim, y_lim) window
polygons = []
for fkey in mesh.faces():
    x, y, z = mesh.face_centroid(fkey)
    if x >= x_lim and y >= y_lim:
        polygons.append(mesh.face_coordinates(fkey))

mesh = Mesh.from_polygons(polygons)
mesh_unify_cycles(mesh)

# transfer the stored vector attributes back onto the rebuilt mesh, matching
# faces by the geometric key of their centroid
for fkey in mesh.faces():
    gkey = geometric_key(mesh.face_centroid(fkey))
    ofkey = centroids[gkey]
    vector = vectors[ofkey]
    vector_2 = vectors_2[ofkey]
    mesh.face_attribute(fkey, tag, vector)
    mesh.face_attribute(fkey, tag_2, vector_2)

# =============================================================================
# Export vertices and faces
# =============================================================================

vertices, faces = mesh.to_vertices_and_faces()

V = np.array(vertices)
print("V shape: ", V.shape)
print("V first row: {}".format(V[0,:]))
print("V last row: {}".format(V[-1,:]))

F = np.array(faces)
print("F shape: ", F.shape)
print("F first row: {}".format(F[0,:]))
print("F last row: {}".format(F[-1,:]))

np.savetxt(THERE + "vertices.txt", V, fmt="%1.6f", delimiter=" ", encoding=None)
np.savetxt(THERE + "faces.txt", F, fmt="%d", delimiter=" ", encoding=None)

# # =============================================================================
# # Export edges on boundary
# # =============================================================================

# E = np.array(mesh.edges_on_boundary())
# print("E shape: ", E.shape)
# print("E first row: {}".format(E[0,:]))
# print("E last row: {}".format(E[-1,:]))

# np.savetxt(THERE + "edges_boundary.txt", E, fmt="%d", delimiter=" ", encoding=None)

# # =============================================================================
# # Export vertices on boundary
# # =============================================================================

# B = np.array(mesh.vertices_on_boundary())
# print("B shape: ", B.shape)
# print("B first row: {}".format(B[0]))
# print("B last row: {}".format(B[-1]))

# NOTE(review): the commented-out line below writes E, not B — if this export
# is ever re-enabled, it should probably save B instead.
# np.savetxt(THERE + "vertices_boundary.txt", E, fmt="%d", delimiter=" ", encoding=None)

# =============================================================================
# Principal stress directions
# =============================================================================

ps1 = mesh.faces_attribute(name=tag, keys=mesh.faces())
ps1 = [normalize_vector(vector) for vector in ps1]

PS1 = np.array(ps1)
print("PS1 shape: ", PS1.shape)
print("PS1 first row: {}".format(PS1[0,:]))
print("PS1 last row: {}".format(PS1[-1,:]))

ps2 = mesh.faces_attribute(name=tag_2, keys=mesh.faces())
ps2 = [normalize_vector(vector) for vector in ps2]

PS2 = np.array(ps2)
print("PS2 shape: ", PS2.shape)
print("PS2 first row: {}".format(PS2[0,:]))
print("PS2 last row: {}".format(PS2[-1,:]))

np.savetxt(THERE + "ps1.txt", PS1, fmt="%1.6f", delimiter=" ", encoding=None)
np.savetxt(THERE + "ps2.txt", PS2, fmt="%1.6f", delimiter=" ", encoding=None)

# sanity check: the two principal directions should be (near-)orthogonal
print("Dot product first row PS1 - PS2: {}".format(np.dot(PS1[0, :], PS2[0,:].T)))
# coding: utf-8NEWLINENEWLINE"""NEWLINE Isilon SDKNEWLINENEWLINE Isilon SDK - Language bindings for the OneFS API # noqa: E501NEWLINENEWLINE OpenAPI spec version: 4NEWLINE Contact: sdk@isilon.comNEWLINE Generated by: https://github.com/swagger-api/swagger-codegen.gitNEWLINE"""NEWLINENEWLINENEWLINEfrom __future__ import absolute_importNEWLINENEWLINEimport unittestNEWLINENEWLINEimport isi_sdk_8_0_1NEWLINEfrom isi_sdk_8_0_1.models.event_channel_parameters import EventChannelParameters # noqa: E501NEWLINEfrom isi_sdk_8_0_1.rest import ApiExceptionNEWLINENEWLINENEWLINEclass TestEventChannelParameters(unittest.TestCase):NEWLINE """EventChannelParameters unit test stubs"""NEWLINENEWLINE def setUp(self):NEWLINE passNEWLINENEWLINE def tearDown(self):NEWLINE passNEWLINENEWLINE def testEventChannelParameters(self):NEWLINE """Test EventChannelParameters"""NEWLINE # FIXME: construct object with mandatory attributes with example valuesNEWLINE # model = isi_sdk_8_0_1.models.event_channel_parameters.EventChannelParameters() # noqa: E501NEWLINE passNEWLINENEWLINENEWLINEif __name__ == '__main__':NEWLINE unittest.main()NEWLINE

from e2cnn.kernels import KernelBasis, EmptyBasisException
from e2cnn.gspaces import *
from e2cnn.nn import FieldType
from .. import utils

from .basisexpansion import BasisExpansion
from .basisexpansion_singleblock import block_basisexpansion

from collections import defaultdict

from typing import Callable, List, Iterable, Dict, Union

import torch
import numpy as np


__all__ = ["BlocksBasisExpansion"]


class BlocksBasisExpansion(BasisExpansion):

    def __init__(self,
                 in_type: FieldType,
                 out_type: FieldType,
                 points: np.ndarray,
                 sigma: List[float],
                 rings: List[float],
                 basis_filter: Callable[[dict], bool] = None,
                 recompute: bool = False,
                 **kwargs
                 ):
        r"""

        With this algorithm, the expansion is done on the intertwiners of the fields' representations pairs in input and
        output.

        Args:
            in_type (FieldType): the input field type
            out_type (FieldType): the output field type
            points (~numpy.ndarray): points where the analytical basis should be sampled
            sigma (list): width of each ring where the bases are sampled
            rings (list): radii of the rings where to sample the bases
            basis_filter (callable, optional): filter for the basis elements. Should take a dictionary containing an
                    element's attributes and return whether to keep it or not.
            recompute (bool, optional): whether to recompute new bases or reuse, if possible, already built tensors.
            **kwargs: keyword arguments specific to the groups and basis used

        Attributes:
            S (int): number of points where the filters are sampled

        """

        assert in_type.gspace == out_type.gspace
        assert isinstance(in_type.gspace, GeneralOnR2)

        super(BlocksBasisExpansion, self).__init__()
        self._in_type = in_type
        self._out_type = out_type
        self._input_size = in_type.size
        self._output_size = out_type.size
        self.points = points

        # int: number of points where the filters are sampled
        # (assumes `points` has shape (coordinates, S) — TODO confirm upstream)
        self.S = self.points.shape[1]

        space = in_type.gspace

        # we group the basis vectors by their input and output representations
        _block_expansion_modules = {}

        # iterate through all different pairs of input/output representationions
        # and, for each of them, build a basis
        for i_repr in in_type._unique_representations:
            for o_repr in out_type._unique_representations:
                reprs_names = (i_repr.name, o_repr.name)
                try:

                    basis = space.build_kernel_basis(i_repr, o_repr,
                                                     sigma=sigma,
                                                     rings=rings,
                                                     **kwargs)

                    block_expansion = block_basisexpansion(basis, points, basis_filter, recompute=recompute)
                    _block_expansion_modules[reprs_names] = block_expansion

                    # register the block expansion as a submodule
                    self.add_module(f"block_expansion_{reprs_names}", block_expansion)

                except EmptyBasisException:
                    # pairs with no intertwiner basis are simply skipped
                    # print(f"Empty basis at {reprs_names}")
                    pass

        self._n_pairs = len(in_type._unique_representations) * len(out_type._unique_representations)

        # the list of all pairs of input/output representations which don't have an empty basis
        self._representations_pairs = sorted(list(_block_expansion_modules.keys()))

        # retrieve for each representation in both input and output fields:
        # - the number of its occurrences,
        # - the indices where it occurs and
        # - whether its occurrences are contiguous or not
        self._in_count, _in_indices, _in_contiguous = _retrieve_indices(in_type)
        self._out_count, _out_indices, _out_contiguous = _retrieve_indices(out_type)

        # compute the attributes and an id for each basis element (and, so, of each parameter)
        # attributes, basis_ids = _compute_attrs_and_ids(in_type, out_type, _block_expansion_modules)
        basis_ids = _compute_attrs_and_ids(in_type, out_type, _block_expansion_modules)

        self._weights_ranges = {}

        last_weight_position = 0

        self._ids_to_basis = {}
        self._basis_to_ids = []

        self._contiguous = {}

        # iterate through the different group of blocks
        # i.e., through all input/output pairs
        for io_pair in self._representations_pairs:

            self._contiguous[io_pair] = _in_contiguous[io_pair[0]] and _out_contiguous[io_pair[1]]

            # build the indices tensors
            if self._contiguous[io_pair]:
                # contiguous occurrences: store (start, stop, length) triples
                # so `forward` can use cheap slicing instead of advanced indexing
                # in_indices = torch.LongTensor([
                in_indices = [
                    _in_indices[io_pair[0]].min(),
                    _in_indices[io_pair[0]].max() + 1,
                    _in_indices[io_pair[0]].max() + 1 - _in_indices[io_pair[0]].min()
                ]# )
                # out_indices = torch.LongTensor([
                out_indices = [
                    _out_indices[io_pair[1]].min(),
                    _out_indices[io_pair[1]].max() + 1,
                    _out_indices[io_pair[1]].max() + 1 - _out_indices[io_pair[1]].min()
                ] #)

                setattr(self, 'in_indices_{}'.format(io_pair), in_indices)
                setattr(self, 'out_indices_{}'.format(io_pair), out_indices)

            else:
                # non-contiguous occurrences: precompute the full cartesian
                # product of row/column indices for advanced indexing
                out_indices, in_indices = torch.meshgrid([_out_indices[io_pair[1]], _in_indices[io_pair[0]]])
                in_indices = in_indices.reshape(-1)
                out_indices = out_indices.reshape(-1)

                # register the indices tensors and the bases tensors as parameters of this module
                self.register_buffer('in_indices_{}'.format(io_pair), in_indices)
                self.register_buffer('out_indices_{}'.format(io_pair), out_indices)

            # count the actual number of parameters
            total_weights = len(basis_ids[io_pair])

            for i, id in enumerate(basis_ids[io_pair]):
                self._ids_to_basis[id] = last_weight_position + i

            self._basis_to_ids += basis_ids[io_pair]

            # evaluate the indices in the global weights tensor to use for the basis belonging to this group
            self._weights_ranges[io_pair] = (last_weight_position, last_weight_position + total_weights)

            # increment the position counter
            last_weight_position += total_weights

    def get_basis_names(self) -> List[str]:
        """Return the identifiers of all basis elements, in weight order."""
        return self._basis_to_ids

    def get_element_info(self, name: Union[str, int]) -> Dict:
        """Return the attribute dictionary of the basis element identified by
        its string id or its integer index in the global weights tensor."""
        if isinstance(name, str):
            idx = self._ids_to_basis[name]
        else:
            idx = name

        # locate the input/output representations pair whose weight range
        # contains this index
        reprs_names = None
        relative_idx = None
        for pair, idx_range in self._weights_ranges.items():
            if idx_range[0] <= idx < idx_range[1]:
                reprs_names = pair
                relative_idx = idx - idx_range[0]
                break
        assert reprs_names is not None and relative_idx is not None

        block_expansion = getattr(self, f"block_expansion_{reprs_names}")
        # which occurrence of this pair the index falls in, and the index
        # within that single block
        block_idx = relative_idx // block_expansion.dimension()
        relative_idx = relative_idx % block_expansion.dimension()

        attr = block_expansion.get_element_info(relative_idx).copy()

        block_count = 0
        out_irreps_count = 0
        for o, o_repr in enumerate(self._out_type.representations):
            in_irreps_count = 0
            for i, i_repr in enumerate(self._in_type.representations):

                if reprs_names == (i_repr.name, o_repr.name):

                    if block_count == block_idx:

                        # retrieve the attributes of each basis element and build a new list of
                        # attributes adding information specific to the current block
                        attr.update({
                            "in_irreps_position": in_irreps_count + attr["in_irrep_idx"],
                            "out_irreps_position": out_irreps_count + attr["out_irrep_idx"],
                            "in_repr": reprs_names[0],
                            "out_repr": reprs_names[1],
                            "in_field_position": i,
                            "out_field_position": o,
                        })

                        # build the ids of the basis vectors
                        # add names and indices of the input and output fields
                        id = '({}-{},{}-{})'.format(i_repr.name, i, o_repr.name, o)
                        # add the original id in the block submodule
                        id += "_" + attr["id"]

                        # update with the new id
                        attr["id"] = id

                        attr["idx"] = idx

                        return attr

                    block_count += 1

                in_irreps_count += len(i_repr.irreps)
            out_irreps_count += len(o_repr.irreps)

        raise ValueError(f"Parameter with index {idx} not found!")

    def get_basis_info(self) -> Iterable:
        """Yield the attribute dictionary of every basis element, in weight
        order, with block-specific positions and a globally unique id."""

        # precompute, for each output field, the cumulative irreps count and
        # the positions of each representation name
        out_irreps_counts = [0]
        out_block_counts = defaultdict(list)
        for o, o_repr in enumerate(self._out_type.representations):
            out_irreps_counts.append(out_irreps_counts[-1] + len(o_repr.irreps))
            out_block_counts[o_repr.name].append(o)

        in_irreps_counts = [0]
        in_block_counts = defaultdict(list)
        for i, i_repr in enumerate(self._in_type.representations):
            in_irreps_counts.append(in_irreps_counts[-1] + len(i_repr.irreps))
            in_block_counts[i_repr.name].append(i)

        # iterate through the different group of blocks
        # i.e., through all input/output pairs
        idx = 0
        for reprs_names in self._representations_pairs:

            block_expansion = getattr(self, f"block_expansion_{reprs_names}")

            for o in out_block_counts[reprs_names[1]]:
                out_irreps_count = out_irreps_counts[o]
                for i in in_block_counts[reprs_names[0]]:
                    in_irreps_count = in_irreps_counts[i]

                    # retrieve the attributes of each basis element and build a new list of
                    # attributes adding information specific to the current block
                    for attr in block_expansion.get_basis_info():
                        attr = attr.copy()
                        attr.update({
                            "in_irreps_position": in_irreps_count + attr["in_irrep_idx"],
                            "out_irreps_position": out_irreps_count + attr["out_irrep_idx"],
                            "in_repr": reprs_names[0],
                            "out_repr": reprs_names[1],
                            "in_field_position": i,
                            "out_field_position": o,
                        })

                        # build the ids of the basis vectors
                        # add names and indices of the input and output fields
                        id = '({}-{},{}-{})'.format(reprs_names[0], i, reprs_names[1], o)
                        # add the original id in the block submodule
                        id += "_" + attr["id"]

                        # update with the new id
                        attr["id"] = id

                        attr["idx"] = idx
                        idx += 1

                        yield attr

    def dimension(self) -> int:
        """Return the total number of learnable weights of this expansion."""
        return len(self._ids_to_basis)

    def _expand_block(self, weights, io_pair):
        """Expand the weights belonging to one input/output representations
        pair into the corresponding sub-filter tensor."""
        # retrieve the basis
        block_expansion = getattr(self, f"block_expansion_{io_pair}")

        # retrieve the linear coefficients for the basis expansion
        coefficients = weights[self._weights_ranges[io_pair][0]:self._weights_ranges[io_pair][1]]

        # reshape coefficients for the batch matrix multiplication
        coefficients = coefficients.view(-1, block_expansion.dimension())

        # expand the current subset of basis vectors and set the result in the appropriate place in the filter
        filter = block_expansion(coefficients)
        # NOTE(review): only o and i are used below; k and p are unpacked for
        # clarity of the expected 4-D shape
        k, o, i, p = filter.shape

        filter = filter.view(self._out_count[io_pair[1]],
                             self._in_count[io_pair[0]],
                             o,
                             i,
                             self.S,
                             )
        filter = filter.transpose(1, 2)
        return filter

    def forward(self, weights: torch.Tensor) -> torch.Tensor:
        """
        Forward step of the Module which expands the basis and returns the filter built

        Args:
            weights (torch.Tensor): the learnable weights used to linearly combine the basis filters

        Returns:
            the filter built

        """
        assert weights.shape[0] == self.dimension()
        assert len(weights.shape) == 1

        if self._n_pairs == 1:
            # if there is only one block (i.e. one type of input field and one type of output field),
            # we can return the expanded block immediately, instead of copying it inside a preallocated empty tensor
            io_pair = self._representations_pairs[0]
            in_indices = getattr(self, f"in_indices_{io_pair}")
            out_indices = getattr(self, f"out_indices_{io_pair}")
            filter = self._expand_block(weights, io_pair).reshape(out_indices[2], in_indices[2], self.S)

        else:

            # build the tensor which will contain te filter
            filter = torch.zeros(self._output_size, self._input_size, self.S, device=weights.device)

            # iterate through all input-output field representations pairs
            for io_pair in self._representations_pairs:

                # retrieve the indices
                in_indices = getattr(self, f"in_indices_{io_pair}")
                out_indices = getattr(self, f"out_indices_{io_pair}")

                # expand the current subset of basis vectors and set the result in the appropriate place in the filter
                expanded = self._expand_block(weights, io_pair)

                if self._contiguous[io_pair]:
                    # contiguous case: (start, stop, length) triples allow slicing
                    filter[
                        out_indices[0]:out_indices[1],
                        in_indices[0]:in_indices[1],
                        :,
                    ] = expanded.reshape(out_indices[2], in_indices[2], self.S)
                else:
                    # non-contiguous case: scatter via precomputed index buffers
                    filter[
                        out_indices,
                        in_indices,
                        :,
                    ] = expanded.reshape(-1, self.S)

        # return the new filter
        return filter


def _retrieve_indices(type: FieldType):
    """For each representation name in `type`, return its occurrence count,
    the fiber indices it occupies and whether those indices are contiguous."""
    fiber_position = 0
    _indices = defaultdict(list)
    _count = defaultdict(int)
    _contiguous = {}

    for repr in type.representations:
        _indices[repr.name] += list(range(fiber_position, fiber_position + repr.size))
        fiber_position += repr.size
        _count[repr.name] += 1

    for name, indices in _indices.items():
        # _contiguous[o_name] = indices == list(range(indices[0], indices[0]+len(indices)))
        _contiguous[name] = utils.check_consecutive_numbers(indices)
        _indices[name] = torch.LongTensor(indices)

    return _count, _indices, _contiguous


def _compute_attrs_and_ids(in_type, out_type, block_submodules):
    """Build, for each input/output representations pair with a non-empty
    basis, the list of globally unique ids of its basis vectors.

    NOTE(review): despite the name, only the ids are returned; the per-element
    attribute computation was dropped (see the commented-out return below).
    """

    basis_ids = defaultdict(lambda: [])

    # iterate over all blocks
    # each block is associated to an input/output representations pair
    out_fiber_position = 0
    out_irreps_count = 0
    for o, o_repr in enumerate(out_type.representations):
        in_fiber_position = 0
        in_irreps_count = 0
        for i, i_repr in enumerate(in_type.representations):

            reprs_names = (i_repr.name, o_repr.name)

            # if a basis for the space of kernels between the current pair of representations exists
            if reprs_names in block_submodules:

                # retrieve the attributes of each basis element and build a new list of
                # attributes adding information specific to the current block
                ids = []
                for attr in block_submodules[reprs_names].get_basis_info():
                    # build the ids of the basis vectors
                    # add names and indices of the input and output fields
                    id = '({}-{},{}-{})'.format(i_repr.name, i, o_repr.name, o)
                    # add the original id in the block submodule
                    id += "_" + attr["id"]

                    ids.append(id)

                # append the ids of the basis vectors
                basis_ids[reprs_names] += ids

            in_fiber_position += i_repr.size
            in_irreps_count += len(i_repr.irreps)
        out_fiber_position += o_repr.size
        out_irreps_count += len(o_repr.irreps)

    # return attributes, basis_ids
    return basis_ids
import abc
import numpy as np

from . import _base_model


class SklearnModel(_base_model.BaseModel, abc.ABC):
    """
    Parent class based on :obj:`~easyPheno.model._base_model.BaseModel` for all models with a sklearn-like API to share
    functionalities. See :obj:`~easyPheno.model._base_model.BaseModel` for more information.

    **Attributes**

        *Inherited attributes*

        See :obj:`~easyPheno.model._base_model.BaseModel`
    """

    def retrain(self, X_retrain: np.ndarray, y_retrain: np.ndarray):
        """
        Implementation of the retraining for models with sklearn-like API.
        See :obj:`~easyPheno.model._base_model.BaseModel` for more information.

        :param X_retrain: feature matrix used for retraining
        :param y_retrain: target values; flattened to 1D because sklearn's ``fit`` expects shape (n_samples,)
        """
        self.model.fit(X_retrain, np.ravel(y_retrain))

    def predict(self, X_in: np.ndarray) -> np.ndarray:
        """
        Implementation of a prediction based on input features for models with sklearn-like API.
        See :obj:`~easyPheno.model._base_model.BaseModel` for more information.

        :param X_in: feature matrix to predict on

        :return: predictions reshaped to a column vector of shape (n_samples, 1)
        """
        return np.reshape(self.model.predict(X_in), (-1, 1))

    def train_val_loop(self, X_train: np.ndarray, y_train: np.ndarray, X_val: np.ndarray, y_val: np.ndarray) -> np.ndarray:
        """
        Implementation of a train and validation loop for models with sklearn-like API.
        See :obj:`~easyPheno.model._base_model.BaseModel` for more information.

        :param X_train: training feature matrix
        :param y_train: training targets (flattened to 1D before fitting)
        :param X_val: validation feature matrix
        :param y_val: validation targets; unused here but kept for the BaseModel interface

        :return: predictions on the validation set, shape (n_samples, 1)
        """
        # Fixed: annotations previously used ``np.array`` (a factory function),
        # not the actual array type ``np.ndarray``.
        self.model.fit(X_train, np.ravel(y_train))
        return self.predict(X_in=X_val)
# coding=utf-8NEWLINE# *** WARNING: this file was generated by the Pulumi SDK Generator. ***NEWLINE# *** Do not edit by hand unless you're certain you know what you are doing! ***NEWLINENEWLINEimport warningsNEWLINEimport pulumiNEWLINEimport pulumi.runtimeNEWLINEfrom typing import Any, Mapping, Optional, Sequence, Union, overloadNEWLINEfrom ... import _utilitiesNEWLINENEWLINE__all__ = ['InstanceArgs', 'Instance']NEWLINENEWLINE@pulumi.input_typeNEWLINEclass InstanceArgs:NEWLINE def __init__(__self__, *,NEWLINE config: pulumi.Input[str],NEWLINE display_name: pulumi.Input[str],NEWLINE instance_id: pulumi.Input[str],NEWLINE labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,NEWLINE name: Optional[pulumi.Input[str]] = None,NEWLINE node_count: Optional[pulumi.Input[int]] = None,NEWLINE processing_units: Optional[pulumi.Input[int]] = None,NEWLINE project: Optional[pulumi.Input[str]] = None):NEWLINE """NEWLINE The set of arguments for constructing a Instance resource.NEWLINE :param pulumi.Input[str] config: The name of the instance's configuration. Values are of the form `projects//instanceConfigs/`. See also InstanceConfig and ListInstanceConfigs.NEWLINE :param pulumi.Input[str] display_name: The descriptive name for this instance as it appears in UIs. Must be unique per project and between 4 and 30 characters in length.NEWLINE :param pulumi.Input[str] instance_id: The ID of the instance to create. Valid identifiers are of the form `a-z*[a-z0-9]` and must be between 2 and 64 characters in length.NEWLINE :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Cloud Labels are a flexible and lightweight mechanism for organizing cloud resources into groups that reflect a customer's organizational needs and deployment strategies. Cloud Labels can be used to filter collections of resources. They can be used to control how resource metrics are aggregated. And they can be used as arguments to policy management rules (e.g. 
route, firewall, load balancing, etc.). * Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `a-z{0,62}`. * Label values must be between 0 and 63 characters long and must conform to the regular expression `[a-z0-9_-]{0,63}`. * No more than 64 labels can be associated with a given resource. See https://goo.gl/xmQnxf for more information on and examples of labels. If you plan to use labels in your own code, please note that additional characters may be allowed in the future. And so you are advised to use an internal label representation, such as JSON, which doesn't rely upon specific characters being disallowed. For example, representing labels as the string: name + "_" + value would prove problematic if we were to allow "_" in a future release.NEWLINE :param pulumi.Input[str] name: A unique identifier for the instance, which cannot be changed after the instance is created. Values are of the form `projects//instances/a-z*[a-z0-9]`. The final segment of the name must be between 2 and 64 characters in length.NEWLINE :param pulumi.Input[int] node_count: The number of nodes allocated to this instance. At most one of either node_count or processing_units should be present in the message. This may be zero in API responses for instances that are not yet in state `READY`. See [the documentation](https://cloud.google.com/spanner/docs/compute-capacity) for more information about nodes and processing units.NEWLINE :param pulumi.Input[int] processing_units: The number of processing units allocated to this instance. At most one of processing_units or node_count should be present in the message. This may be zero in API responses for instances that are not yet in state `READY`. 
See [the documentation](https://cloud.google.com/spanner/docs/compute-capacity) for more information about nodes and processing units.NEWLINE """NEWLINE pulumi.set(__self__, "config", config)NEWLINE pulumi.set(__self__, "display_name", display_name)NEWLINE pulumi.set(__self__, "instance_id", instance_id)NEWLINE if labels is not None:NEWLINE pulumi.set(__self__, "labels", labels)NEWLINE if name is not None:NEWLINE pulumi.set(__self__, "name", name)NEWLINE if node_count is not None:NEWLINE pulumi.set(__self__, "node_count", node_count)NEWLINE if processing_units is not None:NEWLINE pulumi.set(__self__, "processing_units", processing_units)NEWLINE if project is not None:NEWLINE pulumi.set(__self__, "project", project)NEWLINENEWLINE @propertyNEWLINE @pulumi.getterNEWLINE def config(self) -> pulumi.Input[str]:NEWLINE """NEWLINE The name of the instance's configuration. Values are of the form `projects//instanceConfigs/`. See also InstanceConfig and ListInstanceConfigs.NEWLINE """NEWLINE return pulumi.get(self, "config")NEWLINENEWLINE @config.setterNEWLINE def config(self, value: pulumi.Input[str]):NEWLINE pulumi.set(self, "config", value)NEWLINENEWLINE @propertyNEWLINE @pulumi.getter(name="displayName")NEWLINE def display_name(self) -> pulumi.Input[str]:NEWLINE """NEWLINE The descriptive name for this instance as it appears in UIs. Must be unique per project and between 4 and 30 characters in length.NEWLINE """NEWLINE return pulumi.get(self, "display_name")NEWLINENEWLINE @display_name.setterNEWLINE def display_name(self, value: pulumi.Input[str]):NEWLINE pulumi.set(self, "display_name", value)NEWLINENEWLINE @propertyNEWLINE @pulumi.getter(name="instanceId")NEWLINE def instance_id(self) -> pulumi.Input[str]:NEWLINE """NEWLINE The ID of the instance to create. 
Valid identifiers are of the form `a-z*[a-z0-9]` and must be between 2 and 64 characters in length.NEWLINE """NEWLINE return pulumi.get(self, "instance_id")NEWLINENEWLINE @instance_id.setterNEWLINE def instance_id(self, value: pulumi.Input[str]):NEWLINE pulumi.set(self, "instance_id", value)NEWLINENEWLINE @propertyNEWLINE @pulumi.getterNEWLINE def labels(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:NEWLINE """NEWLINE Cloud Labels are a flexible and lightweight mechanism for organizing cloud resources into groups that reflect a customer's organizational needs and deployment strategies. Cloud Labels can be used to filter collections of resources. They can be used to control how resource metrics are aggregated. And they can be used as arguments to policy management rules (e.g. route, firewall, load balancing, etc.). * Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `a-z{0,62}`. * Label values must be between 0 and 63 characters long and must conform to the regular expression `[a-z0-9_-]{0,63}`. * No more than 64 labels can be associated with a given resource. See https://goo.gl/xmQnxf for more information on and examples of labels. If you plan to use labels in your own code, please note that additional characters may be allowed in the future. And so you are advised to use an internal label representation, such as JSON, which doesn't rely upon specific characters being disallowed. 
For example, representing labels as the string: name + "_" + value would prove problematic if we were to allow "_" in a future release.NEWLINE """NEWLINE return pulumi.get(self, "labels")NEWLINENEWLINE @labels.setterNEWLINE def labels(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):NEWLINE pulumi.set(self, "labels", value)NEWLINENEWLINE @propertyNEWLINE @pulumi.getterNEWLINE def name(self) -> Optional[pulumi.Input[str]]:NEWLINE """NEWLINE A unique identifier for the instance, which cannot be changed after the instance is created. Values are of the form `projects//instances/a-z*[a-z0-9]`. The final segment of the name must be between 2 and 64 characters in length.NEWLINE """NEWLINE return pulumi.get(self, "name")NEWLINENEWLINE @name.setterNEWLINE def name(self, value: Optional[pulumi.Input[str]]):NEWLINE pulumi.set(self, "name", value)NEWLINENEWLINE @propertyNEWLINE @pulumi.getter(name="nodeCount")NEWLINE def node_count(self) -> Optional[pulumi.Input[int]]:NEWLINE """NEWLINE The number of nodes allocated to this instance. At most one of either node_count or processing_units should be present in the message. This may be zero in API responses for instances that are not yet in state `READY`. See [the documentation](https://cloud.google.com/spanner/docs/compute-capacity) for more information about nodes and processing units.NEWLINE """NEWLINE return pulumi.get(self, "node_count")NEWLINENEWLINE @node_count.setterNEWLINE def node_count(self, value: Optional[pulumi.Input[int]]):NEWLINE pulumi.set(self, "node_count", value)NEWLINENEWLINE @propertyNEWLINE @pulumi.getter(name="processingUnits")NEWLINE def processing_units(self) -> Optional[pulumi.Input[int]]:NEWLINE """NEWLINE The number of processing units allocated to this instance. At most one of processing_units or node_count should be present in the message. This may be zero in API responses for instances that are not yet in state `READY`. 
See [the documentation](https://cloud.google.com/spanner/docs/compute-capacity) for more information about nodes and processing units.NEWLINE """NEWLINE return pulumi.get(self, "processing_units")NEWLINENEWLINE @processing_units.setterNEWLINE def processing_units(self, value: Optional[pulumi.Input[int]]):NEWLINE pulumi.set(self, "processing_units", value)NEWLINENEWLINE @propertyNEWLINE @pulumi.getterNEWLINE def project(self) -> Optional[pulumi.Input[str]]:NEWLINE return pulumi.get(self, "project")NEWLINENEWLINE @project.setterNEWLINE def project(self, value: Optional[pulumi.Input[str]]):NEWLINE pulumi.set(self, "project", value)NEWLINENEWLINENEWLINEclass Instance(pulumi.CustomResource):NEWLINE @overloadNEWLINE def __init__(__self__,NEWLINE resource_name: str,NEWLINE opts: Optional[pulumi.ResourceOptions] = None,NEWLINE config: Optional[pulumi.Input[str]] = None,NEWLINE display_name: Optional[pulumi.Input[str]] = None,NEWLINE instance_id: Optional[pulumi.Input[str]] = None,NEWLINE labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,NEWLINE name: Optional[pulumi.Input[str]] = None,NEWLINE node_count: Optional[pulumi.Input[int]] = None,NEWLINE processing_units: Optional[pulumi.Input[int]] = None,NEWLINE project: Optional[pulumi.Input[str]] = None,NEWLINE __props__=None):NEWLINE """NEWLINE Creates an instance and begins preparing it to begin serving. The returned long-running operation can be used to track the progress of preparing the new instance. The instance name is assigned by the caller. If the named instance already exists, `CreateInstance` returns `ALREADY_EXISTS`. Immediately upon completion of this request: * The instance is readable via the API, with all requested attributes but no allocated resources. Its state is `CREATING`. Until completion of the returned operation: * Cancelling the operation renders the instance immediately unreadable via the API. * The instance can be deleted. * All other attempts to modify the instance are rejected. 
Upon completion of the returned operation: * Billing for all successfully-allocated resources begins (some types may have lower than the requested levels). * Databases can be created in the instance. * The instance's allocated resource levels are readable via the API. * The instance's state becomes `READY`. The returned long-running operation will have a name of the format `/operations/` and can be used to track creation of the instance. The metadata field type is CreateInstanceMetadata. The response field type is Instance, if successful.NEWLINENEWLINE :param str resource_name: The name of the resource.NEWLINE :param pulumi.ResourceOptions opts: Options for the resource.NEWLINE :param pulumi.Input[str] config: The name of the instance's configuration. Values are of the form `projects//instanceConfigs/`. See also InstanceConfig and ListInstanceConfigs.NEWLINE :param pulumi.Input[str] display_name: The descriptive name for this instance as it appears in UIs. Must be unique per project and between 4 and 30 characters in length.NEWLINE :param pulumi.Input[str] instance_id: The ID of the instance to create. Valid identifiers are of the form `a-z*[a-z0-9]` and must be between 2 and 64 characters in length.NEWLINE :param pulumi.Input[Mapping[str, pulumi.Input[str]]] labels: Cloud Labels are a flexible and lightweight mechanism for organizing cloud resources into groups that reflect a customer's organizational needs and deployment strategies. Cloud Labels can be used to filter collections of resources. They can be used to control how resource metrics are aggregated. And they can be used as arguments to policy management rules (e.g. route, firewall, load balancing, etc.). * Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `a-z{0,62}`. * Label values must be between 0 and 63 characters long and must conform to the regular expression `[a-z0-9_-]{0,63}`. * No more than 64 labels can be associated with a given resource. 
See https://goo.gl/xmQnxf for more information on and examples of labels. If you plan to use labels in your own code, please note that additional characters may be allowed in the future. And so you are advised to use an internal label representation, such as JSON, which doesn't rely upon specific characters being disallowed. For example, representing labels as the string: name + "_" + value would prove problematic if we were to allow "_" in a future release.NEWLINE :param pulumi.Input[str] name: A unique identifier for the instance, which cannot be changed after the instance is created. Values are of the form `projects//instances/a-z*[a-z0-9]`. The final segment of the name must be between 2 and 64 characters in length.NEWLINE :param pulumi.Input[int] node_count: The number of nodes allocated to this instance. At most one of either node_count or processing_units should be present in the message. This may be zero in API responses for instances that are not yet in state `READY`. See [the documentation](https://cloud.google.com/spanner/docs/compute-capacity) for more information about nodes and processing units.NEWLINE :param pulumi.Input[int] processing_units: The number of processing units allocated to this instance. At most one of processing_units or node_count should be present in the message. This may be zero in API responses for instances that are not yet in state `READY`. See [the documentation](https://cloud.google.com/spanner/docs/compute-capacity) for more information about nodes and processing units.NEWLINE """NEWLINE ...NEWLINE @overloadNEWLINE def __init__(__self__,NEWLINE resource_name: str,NEWLINE args: InstanceArgs,NEWLINE opts: Optional[pulumi.ResourceOptions] = None):NEWLINE """NEWLINE Creates an instance and begins preparing it to begin serving. The returned long-running operation can be used to track the progress of preparing the new instance. The instance name is assigned by the caller. 
If the named instance already exists, `CreateInstance` returns `ALREADY_EXISTS`. Immediately upon completion of this request: * The instance is readable via the API, with all requested attributes but no allocated resources. Its state is `CREATING`. Until completion of the returned operation: * Cancelling the operation renders the instance immediately unreadable via the API. * The instance can be deleted. * All other attempts to modify the instance are rejected. Upon completion of the returned operation: * Billing for all successfully-allocated resources begins (some types may have lower than the requested levels). * Databases can be created in the instance. * The instance's allocated resource levels are readable via the API. * The instance's state becomes `READY`. The returned long-running operation will have a name of the format `/operations/` and can be used to track creation of the instance. The metadata field type is CreateInstanceMetadata. The response field type is Instance, if successful.NEWLINENEWLINE :param str resource_name: The name of the resource.NEWLINE :param InstanceArgs args: The arguments to use to populate this resource's properties.NEWLINE :param pulumi.ResourceOptions opts: Options for the resource.NEWLINE """NEWLINE ...NEWLINE def __init__(__self__, resource_name: str, *args, **kwargs):NEWLINE resource_args, opts = _utilities.get_resource_args_opts(InstanceArgs, pulumi.ResourceOptions, *args, **kwargs)NEWLINE if resource_args is not None:NEWLINE __self__._internal_init(resource_name, opts, **resource_args.__dict__)NEWLINE else:NEWLINE __self__._internal_init(resource_name, *args, **kwargs)NEWLINENEWLINE def _internal_init(__self__,NEWLINE resource_name: str,NEWLINE opts: Optional[pulumi.ResourceOptions] = None,NEWLINE config: Optional[pulumi.Input[str]] = None,NEWLINE display_name: Optional[pulumi.Input[str]] = None,NEWLINE instance_id: Optional[pulumi.Input[str]] = None,NEWLINE labels: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = 
None,NEWLINE name: Optional[pulumi.Input[str]] = None,NEWLINE node_count: Optional[pulumi.Input[int]] = None,NEWLINE processing_units: Optional[pulumi.Input[int]] = None,NEWLINE project: Optional[pulumi.Input[str]] = None,NEWLINE __props__=None):NEWLINE if opts is None:NEWLINE opts = pulumi.ResourceOptions()NEWLINE if not isinstance(opts, pulumi.ResourceOptions):NEWLINE raise TypeError('Expected resource options to be a ResourceOptions instance')NEWLINE if opts.version is None:NEWLINE opts.version = _utilities.get_version()NEWLINE if opts.id is None:NEWLINE if __props__ is not None:NEWLINE raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')NEWLINE __props__ = InstanceArgs.__new__(InstanceArgs)NEWLINENEWLINE if config is None and not opts.urn:NEWLINE raise TypeError("Missing required property 'config'")NEWLINE __props__.__dict__["config"] = configNEWLINE if display_name is None and not opts.urn:NEWLINE raise TypeError("Missing required property 'display_name'")NEWLINE __props__.__dict__["display_name"] = display_nameNEWLINE if instance_id is None and not opts.urn:NEWLINE raise TypeError("Missing required property 'instance_id'")NEWLINE __props__.__dict__["instance_id"] = instance_idNEWLINE __props__.__dict__["labels"] = labelsNEWLINE __props__.__dict__["name"] = nameNEWLINE __props__.__dict__["node_count"] = node_countNEWLINE __props__.__dict__["processing_units"] = processing_unitsNEWLINE __props__.__dict__["project"] = projectNEWLINE __props__.__dict__["state"] = NoneNEWLINE super(Instance, __self__).__init__(NEWLINE 'google-native:spanner/v1:Instance',NEWLINE resource_name,NEWLINE __props__,NEWLINE opts)NEWLINENEWLINE @staticmethodNEWLINE def get(resource_name: str,NEWLINE id: pulumi.Input[str],NEWLINE opts: Optional[pulumi.ResourceOptions] = None) -> 'Instance':NEWLINE """NEWLINE Get an existing Instance resource's state with the given name, id, and optional extraNEWLINE properties used to 
qualify the lookup.NEWLINENEWLINE :param str resource_name: The unique name of the resulting resource.NEWLINE :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.NEWLINE :param pulumi.ResourceOptions opts: Options for the resource.NEWLINE """NEWLINE opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))NEWLINENEWLINE __props__ = InstanceArgs.__new__(InstanceArgs)NEWLINENEWLINE __props__.__dict__["config"] = NoneNEWLINE __props__.__dict__["display_name"] = NoneNEWLINE __props__.__dict__["labels"] = NoneNEWLINE __props__.__dict__["name"] = NoneNEWLINE __props__.__dict__["node_count"] = NoneNEWLINE __props__.__dict__["processing_units"] = NoneNEWLINE __props__.__dict__["state"] = NoneNEWLINE return Instance(resource_name, opts=opts, __props__=__props__)NEWLINENEWLINE @propertyNEWLINE @pulumi.getterNEWLINE def config(self) -> pulumi.Output[str]:NEWLINE """NEWLINE The name of the instance's configuration. Values are of the form `projects//instanceConfigs/`. See also InstanceConfig and ListInstanceConfigs.NEWLINE """NEWLINE return pulumi.get(self, "config")NEWLINENEWLINE @propertyNEWLINE @pulumi.getter(name="displayName")NEWLINE def display_name(self) -> pulumi.Output[str]:NEWLINE """NEWLINE The descriptive name for this instance as it appears in UIs. Must be unique per project and between 4 and 30 characters in length.NEWLINE """NEWLINE return pulumi.get(self, "display_name")NEWLINENEWLINE @propertyNEWLINE @pulumi.getterNEWLINE def labels(self) -> pulumi.Output[Mapping[str, str]]:NEWLINE """NEWLINE Cloud Labels are a flexible and lightweight mechanism for organizing cloud resources into groups that reflect a customer's organizational needs and deployment strategies. Cloud Labels can be used to filter collections of resources. They can be used to control how resource metrics are aggregated. And they can be used as arguments to policy management rules (e.g. route, firewall, load balancing, etc.). 
* Label keys must be between 1 and 63 characters long and must conform to the following regular expression: `a-z{0,62}`. * Label values must be between 0 and 63 characters long and must conform to the regular expression `[a-z0-9_-]{0,63}`. * No more than 64 labels can be associated with a given resource. See https://goo.gl/xmQnxf for more information on and examples of labels. If you plan to use labels in your own code, please note that additional characters may be allowed in the future. And so you are advised to use an internal label representation, such as JSON, which doesn't rely upon specific characters being disallowed. For example, representing labels as the string: name + "_" + value would prove problematic if we were to allow "_" in a future release.NEWLINE """NEWLINE return pulumi.get(self, "labels")NEWLINENEWLINE @propertyNEWLINE @pulumi.getterNEWLINE def name(self) -> pulumi.Output[str]:NEWLINE """NEWLINE A unique identifier for the instance, which cannot be changed after the instance is created. Values are of the form `projects//instances/a-z*[a-z0-9]`. The final segment of the name must be between 2 and 64 characters in length.NEWLINE """NEWLINE return pulumi.get(self, "name")NEWLINENEWLINE @propertyNEWLINE @pulumi.getter(name="nodeCount")NEWLINE def node_count(self) -> pulumi.Output[int]:NEWLINE """NEWLINE The number of nodes allocated to this instance. At most one of either node_count or processing_units should be present in the message. This may be zero in API responses for instances that are not yet in state `READY`. See [the documentation](https://cloud.google.com/spanner/docs/compute-capacity) for more information about nodes and processing units.NEWLINE """NEWLINE return pulumi.get(self, "node_count")NEWLINENEWLINE @propertyNEWLINE @pulumi.getter(name="processingUnits")NEWLINE def processing_units(self) -> pulumi.Output[int]:NEWLINE """NEWLINE The number of processing units allocated to this instance. 
At most one of processing_units or node_count should be present in the message. This may be zero in API responses for instances that are not yet in state `READY`. See [the documentation](https://cloud.google.com/spanner/docs/compute-capacity) for more information about nodes and processing units.NEWLINE """NEWLINE return pulumi.get(self, "processing_units")NEWLINENEWLINE @propertyNEWLINE @pulumi.getterNEWLINE def state(self) -> pulumi.Output[str]:NEWLINE """NEWLINE The current instance state. For CreateInstance, the state must be either omitted or set to `CREATING`. For UpdateInstance, the state must be either omitted or set to `READY`.NEWLINE """NEWLINE return pulumi.get(self, "state")NEWLINENEWLINE
# Interactive MatterSim demo (Python 2 — uses print statements).
# Renders RGB and depth views and lets the user drive the camera with the
# keyboard; number keys jump to the navigable viewpoints drawn on screen.
import sys
sys.path.append('build')
import MatterSim
import time
import math
import cv2
import numpy as np

WIDTH = 800
HEIGHT = 600
VFOV = math.radians(60)
# horizontal FOV derived from the vertical FOV and the aspect ratio
HFOV = VFOV*WIDTH/HEIGHT
TEXT_COLOR = [230, 40, 40]

cv2.namedWindow('Python RGB')
cv2.namedWindow('Python Depth')

sim = MatterSim.Simulator()
sim.setCameraResolution(WIDTH, HEIGHT)
sim.setCameraVFOV(VFOV)
sim.setDepthEnabled(True)
sim.initialize()
# fixed-episode alternatives, kept for reference:
#sim.newEpisode(['2t7WUuJeko7'], ['1e6b606b44df4a6086c0f97e826d4d15'], [0], [0])
#sim.newEpisode(['1LXtFkjw3qL'], ['0b22fa63d0f54a529c525afbf2e8bb25'], [0], [0])
sim.newRandomEpisode(['1LXtFkjw3qL'])

# pending action for the next makeAction call; reset to no-op each frame
heading = 0
elevation = 0
location = 0
ANGLEDELTA = 5 * math.pi / 180

print '\nPython Demo'
print 'Use arrow keys to move the camera.'
print 'Use number keys (not numpad) to move to nearby viewpoints indicated in the RGB view.\n'

while True:
    sim.makeAction([location], [heading], [elevation])
    location = 0
    heading = 0
    elevation = 0

    state = sim.getState()[0]
    locations = state.navigableLocations
    # copy=False: wrap the simulator's buffer without copying
    rgb = np.array(state.rgb, copy=False)
    for idx, loc in enumerate(locations[1:]):
        # Draw actions on the screen
        # scale label by inverse distance so nearer viewpoints appear larger
        # NOTE(review): assumes rel_distance > 0 for navigable locations — confirm
        fontScale = 3.0/loc.rel_distance
        # project relative heading/elevation onto pixel coordinates
        x = int(WIDTH/2 + loc.rel_heading/HFOV*WIDTH)
        y = int(HEIGHT/2 - loc.rel_elevation/VFOV*HEIGHT)
        cv2.putText(rgb, str(idx + 1), (x, y), cv2.FONT_HERSHEY_SIMPLEX,
            fontScale, TEXT_COLOR, thickness=3)
    cv2.imshow('Python RGB', rgb)

    depth = np.array(state.depth, copy=False)
    cv2.imshow('Python Depth', depth)
    k = cv2.waitKey(1)
    if k == -1:
        # no key pressed this frame
        continue
    else:
        # mask to the low byte of the key code
        k = (k & 255)
        if k == ord('q'):
            break
        elif ord('1') <= k <= ord('9'):
            # jump to the numbered navigable viewpoint (0 = stay put)
            location = k - ord('0')
            if location >= len(locations):
                location = 0
        # 81-84 are presumably the arrow-key codes on this OpenCV build — confirm
        elif k == 81 or k == ord('a'):
            heading = -ANGLEDELTA
        elif k == 82 or k == ord('w'):
            elevation = ANGLEDELTA
        elif k == 83 or k == ord('d'):
            heading = ANGLEDELTA
        elif k == 84 or k == ord('s'):
            elevation = -ANGLEDELTA
#!/usr/bin/env python

#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Created Date: June 11 2018
Author: Zsolt Nagy
Version: V1.1 ( Automotive - Lambda example )
Copyright (c) 2018 T-Systems

    Stream processing of Connected Car Data
Operation:
    1. UTF8 encoded, JSON data received from Kafka every 1 second.
    2. Data converted to a DataFrame and registered as a TEMP Hive table.
    3. Alarm SQL executed on the current window's dataset; alarms added to the ALARM table.
    4. All data saved to a HIVE EXTERNAL TABLE (Hive or OBS(S3)) for later batch processing.
 Usage: lambda_speedlayer.py
 """
from __future__ import print_function
import sys
import os
from pyspark import SparkContext
from pyspark.streaming import StreamingContext
from pyspark.streaming.kafka import KafkaUtils

from pyspark.sql import Row, SQLContext
from pyspark.sql.types import *
import json
import datetime
from pyspark.sql import HiveContext
from pyspark import SparkFiles

# placeholder for the JVM-side log4j gateway; bound after SparkContext exists
log4jLogger = None
from lambda_config import Config

config = Config()

def load_resource_file(res_file):
    # Read a resource from the ../conf directory (relative to this script)
    # and return its contents as a string.
    res_file = os.path.join(os.path.dirname(os.path.realpath(__file__)) , "../conf", res_file )
    with open(res_file, 'r') as tfile:
        ret = tfile.read()
    return ret


def load_resource_file_from_spark_file(res_file_name):
    # Read a resource that was shipped to the executors via SparkFiles.
    with open(SparkFiles.get(res_file_name)) as test_file:
        alertsql=test_file.read()
    return alertsql


if __name__ == "__main__":
    if len(sys.argv) > 1:
        print("Usage: bin/runspeed.sh", file=sys.stderr)
        exit(-1)
    sc = SparkContext(appName="PythonStreamingKafkaLambda")
    # route script logging through the JVM's log4j so it lands in Spark's logs
    log4jLogger = sc._jvm.org.apache.log4j
    logging = log4jLogger.LogManager.getLogger(__name__)
    logging.info("pyspark script logger initialized")

    # 1-second micro-batch interval
    ssc = StreamingContext(sc, 1)

    # gets brokers and topic from config
    brokers=config.get_lambda_config("kafka_data_producer","broker_list")
    topic=config.get_lambda_config("kafka_data_producer","topic")
    # Streaming context getting car JSON data from the specific topic
    kvs = KafkaUtils.createDirectStream(ssc, [topic], {"metadata.broker.list": brokers})
    # keep only the Kafka message value (drop the key)
    lines = kvs.map(lambda x: x[1])

    def process(time, rdd):
        # Per-batch handler: persist the batch, run the alarm SQL on it, and
        # mirror every record into HBase.
        print("========= %s =========" % str(time))

        try:
            sqlContext = HiveContext(sc)
            # FIX: memory error Spark 2.0 bug ( < 2.0 )
            sqlContext.setConf("spark.sql.tungsten.enabled","false")

            # skip empty micro-batches
            if rdd.count() < 1:
                return;

            # parse JSON and shape each message into a Row
            # (messagedate parsed from the compact %Y%m%d%H%M%S format)
            sqlRdd = rdd.map( lambda x: json.loads(x)).map(lambda r: Row( messageid=r["messageid"], messagedate=datetime.datetime.strptime(r["messagedate"], '%Y%m%d%H%M%S'), value=r["value"], metrics=r["metrics"], name=r["name"] ) )
            speedDataFrame = sqlContext.createDataFrame(sqlRdd)

            # append the raw batch to the Hive batch table
            batch_table_name=config.get_lambda_config( "lambda_speedlayer","speed_batch_table")
            speedDataFrame.write.mode("append").saveAsTable(batch_table_name)

            # if S3 vals defined then save also to OBS (s3)
            # NOTE(review): the "and False" makes this branch dead code —
            # looks like a deliberate disable or debugging leftover; confirm intent
            s3_full_path=config.get_lambda_config( "lambda_speedlayer","s3_full_path")
            if s3_full_path and False:
                speedDataFrame.write.parquet(s3_full_path,mode="append")

            speedDataFrame.show()
            # Creates a temporary view using the DataFrame.
            temp_table_name=config.get_lambda_config( "lambda_speedlayer","speed_temp_table")
            speedDataFrame.registerTempTable(temp_table_name)

            if __debug__:
                speedDataFrame.printSchema()
                speedDataFrame.head( 10 )

            # handling sql alert file: the alarm query text lives in ../conf
            alertsqlfile=config.get_lambda_config( "lambda_speedlayer","alert_sql_path")

            alertsql = load_resource_file( alertsqlfile )
            # Execute alarm query and get the alarm dataset using the temp table
            alertDataFrame = sqlContext.sql(alertsql)
            alertDataFrame.show()
            alertDataFrame.printSchema()

            # save all values to HBASE
            # IF NEED FILTER LATER .filter(lambda x: str(x["metrics"])=='action-credit-limit') \
            # create HBASE mapper: (rowkey, [rowkey-with-timestamp, column family
            # ("driver" vs "car" derived from the metric name), qualifier, value])
            rowRdd = rdd.map( lambda x: json.loads(x))\
                .map(lambda r: ( str(r["metrics"]) ,[ str(r["name"])+"-"+datetime.datetime.now().strftime("%Y%m%d%H%M%S"), "driver" if "driver" in str(r["metrics"]) else "car", str(r["metrics"]), str(r["value"]) ] ))

            table = config.get_lambda_config( "lambda_speedlayer","speed_inbox_table")
            host = config.get_lambda_config( "lambda_speedlayer","hbase_host")
            keyConv = "org.apache.spark.examples.pythonconverters.StringToImmutableBytesWritableConverter"
            valueConv = "org.apache.spark.examples.pythonconverters.StringListToPutConverter"
            conf = {"hbase.zookeeper.quorum": host,
                    "hbase.mapred.outputtable": table,
                    "mapreduce.outputformat.class": "org.apache.hadoop.hbase.mapreduce.TableOutputFormat",
                    "mapreduce.job.output.key.class": "org.apache.hadoop.hbase.io.ImmutableBytesWritable",
                    "mapreduce.job.output.value.class": "org.apache.hadoop.io.Writable"}
            rowRdd.saveAsNewAPIHadoopDataset(conf=conf,keyConverter=keyConv,valueConverter=valueConv)
        except Exception as streamerror:
            # NOTE(review): log4j's Logger.error expects (message, Throwable);
            # passing a Python exception through py4j here may not convert — confirm
            logging.error( "Stream error:",streamerror )
            print (streamerror)
            # re-raise so the streaming job surfaces the failure
            raise

    lines.foreachRDD(process)
    lines.pprint()
    ssc.start()
    ssc.awaitTermination()
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.utils.py3 import httplib

from libcloud.compute.base import Node, NodeSize, NodeImage, NodeLocation
from libcloud.compute.drivers.voxel import VoxelNodeDriver as Voxel
from libcloud.compute.types import InvalidCredsError

from libcloud.test import MockHttp
from libcloud.test.file_fixtures import ComputeFileFixtures

from libcloud.test.secrets import VOXEL_PARAMS


class VoxelTest(unittest.TestCase):
    """Unit tests for the Voxel compute driver.

    All HTTP traffic is routed through VoxelMockHttp; the class-level
    ``type`` attribute selects which canned XML fixture each test gets.
    """

    def setUp(self):

        # Swap the real connection class for the mock and reset the
        # fixture selector so each test starts from a clean state.
        Voxel.connectionCls.conn_class = VoxelMockHttp
        VoxelMockHttp.type = None
        self.driver = Voxel(*VOXEL_PARAMS)

    def test_auth_failed(self):
        # An 'unauthorized' API response must surface as InvalidCredsError.
        VoxelMockHttp.type = 'UNAUTHORIZED'
        try:
            self.driver.list_nodes()
        except Exception as e:
            self.assertTrue(isinstance(e, InvalidCredsError))
        else:
            self.fail('test should have thrown')

    def test_response_failure(self):
        # A generic API failure must raise (exact exception type not asserted).
        VoxelMockHttp.type = 'FAILURE'

        try:
            self.driver.list_nodes()
        except Exception:
            pass
        else:
            self.fail('Invalid response, but exception was not thrown')

    def test_list_nodes(self):
        VoxelMockHttp.type = 'LIST_NODES'
        nodes = self.driver.list_nodes()

        self.assertEqual(len(nodes), 1)
        self.assertEqual(nodes[0].name, 'www.voxel.net')

    def test_list_sizes(self):
        # Sizes are hard-coded in the driver, so no fixture type is needed.
        sizes = self.driver.list_sizes()

        self.assertEqual(len(sizes), 13)

    def test_list_images(self):
        VoxelMockHttp.type = 'LIST_IMAGES'
        images = self.driver.list_images()

        self.assertEqual(len(images), 1)

    def test_list_locations(self):
        VoxelMockHttp.type = 'LIST_LOCATIONS'
        locations = self.driver.list_locations()

        self.assertEqual(len(locations), 2)
        self.assertEqual(locations[0].name, 'Amsterdam')

    def test_create_node_invalid_disk_size(self):
        # NodeSize with disk=None must be rejected by the driver.
        image = NodeImage(
            id=1, name='Ubuntu 8.10 (intrepid)', driver=self.driver)
        size = NodeSize(
            1, '256 slice', None, None, None, None, driver=self.driver)
        location = NodeLocation(id=1, name='Europe', country='England',
                                driver=self.driver)

        try:
            self.driver.create_node(name='foo', image=image, size=size,
                                    location=location)
        except ValueError:
            pass
        else:
            self.fail('Invalid disk size provided but an exception was not'
                      ' thrown')

    def test_create_node(self):
        VoxelMockHttp.type = 'CREATE_NODE'
        image = NodeImage(
            id=1, name='Ubuntu 8.10 (intrepid)', driver=self.driver)
        size = NodeSize(
            1, '256 slice', 1024, 500, None, None, driver=self.driver)
        location = NodeLocation(id=1, name='Europe', country='England',
                                driver=self.driver)

        node = self.driver.create_node(name='foo', image=image, size=size,
                                       location=location)
        self.assertEqual(node.id, '1234')

        # Same call with the driver-specific ex_voxel_access flag.
        node = self.driver.create_node(name='foo', image=image, size=size,
                                       location=location, ex_voxel_access=True)
        self.assertEqual(node.id, '1234')

    def test_reboot_node(self):
        VoxelMockHttp.type = 'REBOOT_NODE'
        node = Node(
            id=72258, name=None, state=None, public_ips=None, private_ips=None,
            driver=self.driver)

        self.assertTrue(node.reboot())

    def test_destroy_node(self):
        VoxelMockHttp.type = 'DESTROY_NODE'
        node = Node(
            id=72258, name=None, state=None, public_ips=None, private_ips=None,
            driver=self.driver)

        self.assertTrue(node.destroy())


class VoxelMockHttp(MockHttp):
    """Mock HTTP layer: each ``type`` value maps to a method returning a fixture."""

    fixtures = ComputeFileFixtures('voxel')

    def _UNAUTHORIZED(self, method, url, body, headers):
        body = self.fixtures.load('unauthorized.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _FAILURE(self, method, url, body, headers):
        body = self.fixtures.load('failure.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _LIST_NODES(self, method, url, body, headers):
        body = self.fixtures.load('nodes.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _LIST_IMAGES(self, method, url, body, headers):
        body = self.fixtures.load('images.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _LIST_LOCATIONS(self, method, url, body, headers):
        body = self.fixtures.load('locations.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _CREATE_NODE(self, method, url, body, headers):
        body = self.fixtures.load('create_node.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _REBOOT_NODE(self, method, url, body, headers):
        body = self.fixtures.load('success.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

    def _DESTROY_NODE(self, method, url, body, headers):
        body = self.fixtures.load('success.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])

if __name__ == '__main__':
    sys.exit(unittest.main())
# qubit number=4
# total number=45
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
from collections import Counter

# NOTE(review): this connection object is never used below; it is kept only
# because constructing it is a pre-existing module-level side effect.
conn = QVMConnection()

def make_circuit() -> Program:
    """Build the fixed 4-qubit demo circuit.

    The `# number=N` comments track the generating tool's edit history and
    are preserved verbatim.
    """
    prog = Program()  # circuit begin

    prog += H(3)  # number=32
    prog += CZ(0,3)  # number=33
    prog += H(3)  # number=34
    prog += H(3)  # number=26
    prog += CZ(0,3)  # number=27
    prog += H(3)  # number=28
    prog += X(3)  # number=24
    prog += CNOT(0,3)  # number=25
    prog += CNOT(0,3)  # number=12
    prog += H(2)  # number=29
    prog += CZ(0,2)  # number=30
    prog += H(2)  # number=31
    prog += X(2)  # number=21
    prog += H(2)  # number=39
    prog += CZ(0,2)  # number=40
    prog += H(2)  # number=41

    prog += H(1)  # number=2
    prog += H(2)  # number=3
    prog += H(3)  # number=4
    prog += H(0)  # number=5
    prog += Y(3)  # number=36
    prog += H(3)  # number=16
    prog += CZ(1,3)  # number=17
    prog += H(3)  # number=18

    prog += H(1)  # number=6
    prog += H(2)  # number=37
    prog += Z(1)  # number=35
    prog += Y(3)  # number=38
    prog += H(2)  # number=7
    prog += H(3)  # number=8
    prog += H(0)  # number=9
    prog += H(0)  # number=42
    prog += CZ(3,0)  # number=43
    prog += H(0)  # number=44
    prog += CNOT(3,0)  # number=14
    # circuit end

    return prog

def summrise_results(bitstrings) -> dict:
    """Count occurrences of each measured bitstring.

    Uses collections.Counter instead of a manual dict loop, but returns a
    plain dict so the printed representation stays `{...}` as before.
    """
    return dict(Counter(bitstrings))

if __name__ == '__main__':
    prog = make_circuit()
    qvm = get_qc('4q-qvm')

    # Run the circuit 1024 times and assemble per-shot bitstrings,
    # one column per qubit.
    results = qvm.run_and_measure(prog, 1024)
    bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
    bitstrings = [''.join(map(str, l)) for l in bitstrings]
    writefile = open("../data/startPyquil3216.csv", "w")
    print(summrise_results(bitstrings), file=writefile)
    writefile.close()
NEWLINEimport numpy as npNEWLINENEWLINEclass Variable():NEWLINENEWLINE def __init__(self, data, creator=None):NEWLINENEWLINE if data is None:NEWLINE raise ValueError("data is not allowed to be None.")NEWLINE if not isinstance(data, np.ndarray):NEWLINE data = np.array(data)NEWLINE NEWLINE self.data = dataNEWLINE self.grad = np.zeros_like(self.data)NEWLINE self.creator = creatorNEWLINENEWLINE def backward(self, init=True):NEWLINE NEWLINE if init is True:NEWLINE self.grad = np.ones_like(self.data)NEWLINENEWLINE funcs = [self.creator] if self.creator is not None else NoneNEWLINE while funcs:NEWLINE f = funcs.pop()NEWLINE y, x = f.output, f.inputNEWLINE x.grad += f.backward(y.grad)NEWLINE if x.creator is not None:NEWLINE funcs.append(x.creator)NEWLINE NEWLINENEWLINE NEWLINENEWLINE
# coding=utf-8
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Large Spanish Corpus is a compilation of Spanish corpora spanning Wikipedia to European parliament notes."""

from __future__ import absolute_import, division, print_function

import os

import datasets


_CITATION = """\
@dataset{jose_canete_2019_3247731,
  author = {José Cañete},
  title = {Compilation of Large Spanish Unannotated Corpora},
  month = may,
  year = 2019,
  publisher = {Zenodo},
  doi = {10.5281/zenodo.3247731},
  url = {https://doi.org/10.5281/zenodo.3247731}
}
"""

_DESCRIPTION = """\
The Large Spanish Corpus is a compilation of 15 unlabelled Spanish corpora spanning Wikipedia to European parliament \
notes. Each config contains the data corresponding to a different corpus. For example, "all_wiki" only includes \
examples from Spanish Wikipedia. By default, the config is set to "combined" which loads all the corpora; with this \
setting you can also specify the number of samples to return per corpus by configuring the "split" argument.
"""

_HOMEPAGE = "https://github.com/josecannete/spanish-corpora"

_LICENSE = "MIT"

_URL = "https://zenodo.org/record/3247731/files/raw.tar.bz2"

# Names of the individual corpora as they appear inside the downloaded tarball.
_CORPORA = [
    "JRC",
    "EMEA",
    "GlobalVoices",
    "ECB",
    "DOGC",
    "all_wikis",
    "TED",
    "multiUN",
    "Europarl",
    "NewsCommentary11",
    "UN",
    "EUBookShop",
    "ParaCrawl",
    "OpenSubtitles2018",
    "DGT",
]

# Corpus name -> path of its text file inside the extracted archive.
_CORPORA_FILEPATHS = {corpus: os.path.join("spanish-corpora", "raw", f"{corpus}.txt") for corpus in _CORPORA}

_VERSION = "1.1.0"

# Name of the config that concatenates every corpus.
_COMBINED = "combined"


class LargeSpanishCorpusConfig(datasets.BuilderConfig):
    """BuilderConfig selecting which subset of the corpora a config loads."""

    def __init__(self, corpora=None, **kwargs):
        # corpora: list of corpus names from _CORPORA this config covers.
        super(LargeSpanishCorpusConfig, self).__init__(version=datasets.Version(_VERSION, ""), **kwargs)
        self.corpora = corpora

    @property
    def filepaths(self):
        """Archive-relative paths of the text files for this config's corpora."""
        return [_CORPORA_FILEPATHS[corpus] for corpus in self.corpora]


class LargeSpanishCorpus(datasets.GeneratorBasedBuilder):
    """The Large Spanish Corpus."""

    # One config per individual corpus, plus a "combined" config with all of them.
    BUILDER_CONFIGS = [
        LargeSpanishCorpusConfig(name=corpus, corpora=[corpus], description=f"Spanish examples in corpus {corpus}.")
        for corpus in _CORPORA
    ] + [
        LargeSpanishCorpusConfig(
            name=_COMBINED, corpora=_CORPORA, description=f"Complete Spanish dataset with all corpora."
        )
    ]
    BUILDER_CONFIG_CLASS = LargeSpanishCorpusConfig
    DEFAULT_CONFIG_NAME = _COMBINED

    def _info(self):
        # Single "text" feature: each example is one raw line of corpus text.
        return datasets.DatasetInfo(
            description=_DESCRIPTION,
            features=datasets.Features(
                {
                    "text": datasets.Value("string"),
                }
            ),
            supervised_keys=None,
            homepage=_HOMEPAGE,
            license=_LICENSE,
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        # Download/extract the full tarball once; per-config file selection
        # happens in _generate_examples via self.config.filepaths.
        data_dir = dl_manager.download_and_extract(_URL)
        return [datasets.SplitGenerator(name=datasets.Split.TRAIN, gen_kwargs={"data_dir": data_dir})]

    def _generate_examples(self, data_dir):
        # Yield one example per line, with ids restarting at 0 for each file.
        for filepath in self.config.filepaths:
            filepath = os.path.join(data_dir, filepath)
            _id = 0
            with open(filepath, mode="r", encoding="utf-8") as f:
                for line in f:
                    yield _id, {"text": line.strip()},
                    _id += 1
#!/usr/bin/env pythonNEWLINE# Copyright (C) 2012-2013, The CyanogenMod ProjectNEWLINE# (C) 2017-2018,2020-2021, The LineageOS ProjectNEWLINE#NEWLINE# Licensed under the Apache License, Version 2.0 (the "License");NEWLINE# you may not use this file except in compliance with the License.NEWLINE# You may obtain a copy of the License atNEWLINE#NEWLINE# http://www.apache.org/licenses/LICENSE-2.0NEWLINE#NEWLINE# Unless required by applicable law or agreed to in writing, softwareNEWLINE# distributed under the License is distributed on an "AS IS" BASIS,NEWLINE# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.NEWLINE# See the License for the specific language governing permissions andNEWLINE# limitations under the License.NEWLINENEWLINEfrom __future__ import print_functionNEWLINENEWLINEimport base64NEWLINEimport jsonNEWLINEimport netrcNEWLINEimport osNEWLINEimport reNEWLINEimport sysNEWLINEtry:NEWLINE # For python3NEWLINE import urllib.errorNEWLINE import urllib.parseNEWLINE import urllib.requestNEWLINEexcept ImportError:NEWLINE # For python2NEWLINE import impNEWLINE import urllib2NEWLINE import urlparseNEWLINE urllib = imp.new_module('urllib')NEWLINE urllib.error = urllib2NEWLINE urllib.parse = urlparseNEWLINE urllib.request = urllib2NEWLINENEWLINEfrom xml.etree import ElementTreeNEWLINENEWLINEproduct = sys.argv[1]NEWLINENEWLINEif len(sys.argv) > 2:NEWLINE depsonly = sys.argv[2]NEWLINEelse:NEWLINE depsonly = NoneNEWLINENEWLINEtry:NEWLINE device = product[product.index("_") + 1:]NEWLINEexcept:NEWLINE device = productNEWLINENEWLINEif not depsonly:NEWLINE print("Device %s not found. Attempting to retrieve device repository from Havoc-OS Github (http://github.com/Havoc-Devices)." 
% device)NEWLINENEWLINErepositories = []NEWLINENEWLINEtry:NEWLINE authtuple = netrc.netrc().authenticators("api.github.com")NEWLINENEWLINE if authtuple:NEWLINE auth_string = ('%s:%s' % (authtuple[0], authtuple[2])).encode()NEWLINE githubauth = base64.encodestring(auth_string).decode().replace('\n', '')NEWLINE else:NEWLINE githubauth = NoneNEWLINEexcept:NEWLINE githubauth = NoneNEWLINENEWLINEdef add_auth(githubreq):NEWLINE if githubauth:NEWLINE githubreq.add_header("Authorization","Basic %s" % githubauth)NEWLINENEWLINEif not depsonly:NEWLINE githubreq = urllib.request.Request("https://api.github.com/search/repositories?q=%s+user:Havoc-Devices+in:name+fork:true" % device)NEWLINE add_auth(githubreq)NEWLINE try:NEWLINE result = json.loads(urllib.request.urlopen(githubreq).read().decode())NEWLINE except urllib.error.URLError:NEWLINE print("Failed to search GitHub")NEWLINE sys.exit(1)NEWLINE except ValueError:NEWLINE print("Failed to parse return data from GitHub")NEWLINE sys.exit(1)NEWLINE for res in result.get('items', []):NEWLINE repositories.append(res)NEWLINENEWLINElocal_manifests = r'.repo/local_manifests'NEWLINEif not os.path.exists(local_manifests): os.makedirs(local_manifests)NEWLINENEWLINEdef exists_in_tree(lm, path):NEWLINE for child in lm.getchildren():NEWLINE if child.attrib['path'] == path:NEWLINE return TrueNEWLINE return FalseNEWLINENEWLINE# in-place prettyprint formatterNEWLINEdef indent(elem, level=0):NEWLINE i = "\n" + level*" "NEWLINE if len(elem):NEWLINE if not elem.text or not elem.text.strip():NEWLINE elem.text = i + " "NEWLINE if not elem.tail or not elem.tail.strip():NEWLINE elem.tail = iNEWLINE for elem in elem:NEWLINE indent(elem, level+1)NEWLINE if not elem.tail or not elem.tail.strip():NEWLINE elem.tail = iNEWLINE else:NEWLINE if level and (not elem.tail or not elem.tail.strip()):NEWLINE elem.tail = iNEWLINENEWLINEdef get_manifest_path():NEWLINE '''Find the current manifest pathNEWLINE In old versions of repo this is at 
.repo/manifest.xmlNEWLINE In new versions, .repo/manifest.xml includes an includeNEWLINE to some arbitrary file in .repo/manifests'''NEWLINENEWLINE m = ElementTree.parse(".repo/manifest.xml")NEWLINE try:NEWLINE m.findall('default')[0]NEWLINE return '.repo/manifest.xml'NEWLINE except IndexError:NEWLINE return ".repo/manifests/{}".format(m.find("include").get("name"))NEWLINENEWLINEdef get_default_revision():NEWLINE m = ElementTree.parse(get_manifest_path())NEWLINE d = m.findall('default')[0]NEWLINE r = d.get('revision')NEWLINE return r.replace('refs/heads/', '').replace('refs/tags/', '')NEWLINENEWLINEdef get_from_manifest(devicename):NEWLINE try:NEWLINE lm = ElementTree.parse(".repo/local_manifests/roomservice.xml")NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for localpath in lm.findall("project"):NEWLINE if re.search("android_device_.*_%s$" % device, localpath.get("name")):NEWLINE return localpath.get("path")NEWLINENEWLINE return NoneNEWLINENEWLINEdef is_in_manifest(projectpath):NEWLINE try:NEWLINE lm = ElementTree.parse(".repo/local_manifests/roomservice.xml")NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for localpath in lm.findall("project"):NEWLINE if localpath.get("path") == projectpath:NEWLINE return TrueNEWLINENEWLINE # Search in main manifest, tooNEWLINE try:NEWLINE lm = ElementTree.parse(get_manifest_path())NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for localpath in lm.findall("project"):NEWLINE if localpath.get("path") == projectpath:NEWLINE return TrueNEWLINENEWLINE # ... 
and don't forget the havoc snippetNEWLINE try:NEWLINE lm = ElementTree.parse(".repo/manifests/snippets/havoc.xml")NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for localpath in lm.findall("project"):NEWLINE if localpath.get("path") == projectpath:NEWLINE return TrueNEWLINENEWLINE return FalseNEWLINENEWLINEdef add_to_manifest(repositories, fallback_branch = None):NEWLINE try:NEWLINE lm = ElementTree.parse(".repo/local_manifests/roomservice.xml")NEWLINE lm = lm.getroot()NEWLINE except:NEWLINE lm = ElementTree.Element("manifest")NEWLINENEWLINE for repository in repositories:NEWLINE repo_name = repository['repository']NEWLINE repo_target = repository['target_path']NEWLINE print('Checking if %s is fetched from %s' % (repo_target, repo_name))NEWLINE if is_in_manifest(repo_target):NEWLINE print('Havoc-Devices/%s already fetched to %s' % (repo_name, repo_target))NEWLINE continueNEWLINENEWLINE print('Adding dependency: Havoc-Devices/%s -> %s' % (repo_name, repo_target))NEWLINE project = ElementTree.Element("project", attrib = { "path": repo_target,NEWLINE "remote": "havoc-devices", "name": "%s" % repo_name })NEWLINENEWLINE if 'branch' in repository:NEWLINE project.set('revision',repository['branch'])NEWLINE elif fallback_branch:NEWLINE print("Using fallback branch %s for %s" % (fallback_branch, repo_name))NEWLINE project.set('revision', fallback_branch)NEWLINE else:NEWLINE print("Using default branch for %s" % repo_name)NEWLINENEWLINE lm.append(project)NEWLINENEWLINE indent(lm, 0)NEWLINE raw_xml = ElementTree.tostring(lm).decode()NEWLINE raw_xml = '<?xml version="1.0" encoding="UTF-8"?>\n' + raw_xmlNEWLINENEWLINE f = open('.repo/local_manifests/roomservice.xml', 'w')NEWLINE f.write(raw_xml)NEWLINE f.close()NEWLINENEWLINEdef fetch_dependencies(repo_path, fallback_branch = None):NEWLINE print('Looking for dependencies in %s' % repo_path)NEWLINE dependencies_path = repo_path + '/havoc.dependencies'NEWLINE syncable_repos = 
[]NEWLINE verify_repos = []NEWLINENEWLINE if os.path.exists(dependencies_path):NEWLINE dependencies_file = open(dependencies_path, 'r')NEWLINE dependencies = json.loads(dependencies_file.read())NEWLINE fetch_list = []NEWLINENEWLINE for dependency in dependencies:NEWLINE if not is_in_manifest(dependency['target_path']):NEWLINE fetch_list.append(dependency)NEWLINE syncable_repos.append(dependency['target_path'])NEWLINE verify_repos.append(dependency['target_path'])NEWLINE else:NEWLINE verify_repos.append(dependency['target_path'])NEWLINENEWLINE if not os.path.isdir(dependency['target_path']):NEWLINE syncable_repos.append(dependency['target_path'])NEWLINENEWLINE dependencies_file.close()NEWLINENEWLINE if len(fetch_list) > 0:NEWLINE print('Adding dependencies to manifest')NEWLINE add_to_manifest(fetch_list, fallback_branch)NEWLINE else:NEWLINE print('%s has no additional dependencies.' % repo_path)NEWLINENEWLINE if len(syncable_repos) > 0:NEWLINE print('Syncing dependencies')NEWLINE os.system('repo sync --force-sync %s' % ' '.join(syncable_repos))NEWLINENEWLINE for deprepo in verify_repos:NEWLINE fetch_dependencies(deprepo)NEWLINENEWLINEdef has_branch(branches, revision):NEWLINE return revision in [branch['name'] for branch in branches]NEWLINENEWLINEif depsonly:NEWLINE repo_path = get_from_manifest(device)NEWLINE if repo_path:NEWLINE fetch_dependencies(repo_path)NEWLINE else:NEWLINE print("Trying dependencies-only mode on a non-existing device tree?")NEWLINENEWLINE sys.exit()NEWLINENEWLINEelse:NEWLINE for repository in repositories:NEWLINE repo_name = repository['name']NEWLINE if re.match(r"^android_device_[^_]*_" + device + "$", repo_name):NEWLINE print("Found repository: %s" % repository['name'])NEWLINE NEWLINE manufacturer = repo_name.replace("android_device_", "").replace("_" + device, "")NEWLINE NEWLINE default_revision = get_default_revision()NEWLINE print("Default revision: %s" % default_revision)NEWLINE print("Checking branch info")NEWLINE githubreq = 
urllib.request.Request(repository['branches_url'].replace('{/branch}', ''))NEWLINE add_auth(githubreq)NEWLINE result = json.loads(urllib.request.urlopen(githubreq).read().decode())NEWLINENEWLINE ## Try tags, too, since that's what releases useNEWLINE if not has_branch(result, default_revision):NEWLINE githubreq = urllib.request.Request(repository['tags_url'].replace('{/tag}', ''))NEWLINE add_auth(githubreq)NEWLINE result.extend (json.loads(urllib.request.urlopen(githubreq).read().decode()))NEWLINE NEWLINE repo_path = "device/%s/%s" % (manufacturer, device)NEWLINE adding = {'repository':repo_name,'target_path':repo_path}NEWLINE NEWLINE fallback_branch = NoneNEWLINE if not has_branch(result, default_revision):NEWLINE if os.getenv('ROOMSERVICE_BRANCHES'):NEWLINE fallbacks = list(filter(bool, os.getenv('ROOMSERVICE_BRANCHES').split(' ')))NEWLINE for fallback in fallbacks:NEWLINE if has_branch(result, fallback):NEWLINE print("Using fallback branch: %s" % fallback)NEWLINE fallback_branch = fallbackNEWLINE breakNEWLINENEWLINE if not fallback_branch:NEWLINE print("Default revision %s not found in %s. Bailing." % (default_revision, repo_name))NEWLINE print("Branches found:")NEWLINE for branch in [branch['name'] for branch in result]:NEWLINE print(branch)NEWLINE print("Use the ROOMSERVICE_BRANCHES environment variable to specify a list of fallback branches.")NEWLINE sys.exit()NEWLINENEWLINE add_to_manifest([adding], fallback_branch)NEWLINENEWLINE print("Syncing repository to retrieve project.")NEWLINE os.system('repo sync --force-sync %s' % repo_path)NEWLINE print("Repository synced!")NEWLINENEWLINE fetch_dependencies(repo_path, fallback_branch)NEWLINE print("Done")NEWLINE sys.exit()NEWLINENEWLINEprint("Repository for %s not found in the Havoc-Devices Github repository list. If this is in error, you may need to manually add it to your local_manifests/roomservice.xml." % device)NEWLINE
#
# This file is part of LiteX-Boards.
#
# Copyright (c) 2021 Franck Jullien <franck.jullien@collshade.fr>
# SPDX-License-Identifier: BSD-2-Clause

from litex.build.generic_platform import *
from litex.build.efinix.platform import EfinixPlatform
from litex.build.efinix import EfinixProgrammer

# IOs ----------------------------------------------------------------------------------------------

_io = [
    # Clk
    ("clk25", 0, Pins("B2"), IOStandard("1.8_V_LVCMOS")),
    ("clk33", 0, Pins("P2"), IOStandard("1.8_V_LVCMOS")),
    ("clk74_25", 0, Pins("A11"), IOStandard("1.8_V_LVCMOS")),

    # SD-Card
    ("spisdcard", 0,
        Subsignal("clk", Pins("B12")),
        Subsignal("mosi", Pins("C12"), Misc("WEAK_PULLUP")),
        Subsignal("cs_n", Pins("A12"), Misc("WEAK_PULLUP")),
        Subsignal("miso", Pins("B14"), Misc("WEAK_PULLUP")),
        IOStandard("1.8_V_LVCMOS"),
    ),
    # 4-bit SD mode shares pins with the SPI-mode definition above.
    ("sdcard", 0,
        Subsignal("data", Pins("B14 A14 D12 A12"), Misc("WEAK_PULLUP")),
        Subsignal("cmd", Pins("C12"), Misc("WEAK_PULLUP")),
        Subsignal("clk", Pins("B12")),
        IOStandard("3.3_V_LVCMOS"),
    ),

    # Serial
    ("serial", 0,
        Subsignal("tx", Pins("R4")),
        Subsignal("rx", Pins("R3")),
        IOStandard("3.3_V_LVCMOS"), Misc("WEAK_PULLUP")
    ),

    # Leds (RGB)
    ("user_led", 0,
        Subsignal("r", Pins("J15")),
        Subsignal("g", Pins("H10")),
        Subsignal("b", Pins("K14")),
        IOStandard("1.8_V_LVCMOS"),
    ),
    ("user_led", 1,
        Subsignal("r", Pins("H15")),
        Subsignal("g", Pins("H11")),
        Subsignal("b", Pins("J14")),
        IOStandard("1.8_V_LVCMOS"),
    ),

    # Buttons
    ("user_btn", 0, Pins("K13"), IOStandard("1.8_V_LVCMOS")),
    ("user_btn", 1, Pins("J13"), IOStandard("1.8_V_LVCMOS")),
    ("user_btn", 2, Pins("C5"), IOStandard("1.8_V_LVCMOS")),
    ("user_btn", 3, Pins("R13"), IOStandard("1.8_V_LVCMOS")),

    # Switches
    ("user_sw", 0, Pins("F3"), IOStandard("1.8_V_LVCMOS")),
    ("user_sw", 1, Pins("E3"), IOStandard("1.8_V_LVCMOS")),

    # SPIFlash
    ("spiflash", 0,
        Subsignal("cs_n", Pins("P1")),
        Subsignal("clk", Pins("N1")),
        Subsignal("mosi", Pins("M1")),
        Subsignal("miso", Pins("L1")),
        IOStandard("1.8_V_LVCMOS")
    ),

    # HyperRAM (X16)
    ("hyperram", 0,
        Subsignal("dq", Pins(
            "B6 C6 A5 A6 F7 F8 E7 D7",
            "B9 A9 F9 E9 C10 D10 A10 B10"
        ), IOStandard("1.8_V_LVCMOS")),
        Subsignal("rwds", Pins("B8 C8"), IOStandard("1.8_V_LVCMOS")),
        Subsignal("cs_n", Pins("A8"), IOStandard("1.8_V_LVCMOS")),
        Subsignal("rst_n", Pins("D5"), IOStandard("1.8_V_LVCMOS")),
        Subsignal("clk", Pins("B7"), IOStandard("1.8_V_LVCMOS")),
        Misc("SLEWRATE=FAST")
    ),

    # MIPI
    ("mipi_tx", 0,
        Subsignal("clk", Pins("D13"), IOStandard("1.2_V_LVCMOS")),
        Subsignal("data0", Pins("C15"), IOStandard("1.2_V_LVCMOS")),
        Subsignal("data1", Pins("D14"), IOStandard("1.2_V_LVCMOS")),
        Subsignal("data2", Pins("E14"), IOStandard("1.2_V_LVCMOS")),
        Subsignal("data3", Pins("E12"), IOStandard("1.2_V_LVCMOS")),
        Misc("SLEWRATE=FAST")
    ),
]

# Default IO standard per FPGA I/O bank.
iobank_info = [
    ("1A", "1.8 V LVCMOS"),
    ("1B", "1.8 V LVCMOS"),
    ("2A", "1.8 V LVCMOS"),
    ("2B", "1.8 V LVCMOS"),
    ("3A", "1.8 V LVCMOS"),
    ("3B", "1.8 V LVCMOS"),
    ("4A", "1.8 V LVCMOS"),
    ("4B", "1.8 V LVCMOS"),
    ("BL", "3.3 V LVCMOS"),
    ("BR", "1.8 V LVCMOS"),
    ("TL", "1.8 V LVCMOS"),
    ("TR", "3.3 V LVCMOS"),
]

# Connectors ---------------------------------------------------------------------------------------

_connectors = [
    ["P1", " - H14 - G14 - - F12 G13 E12 F13 - - E15 H13 E14 H12 - - C13 G15 D13 F15",
           " - - D15 G11 D14 F11 - - C14 N14 C15 P14 - - K4 A4 J3 B5"],
]

# Platform -----------------------------------------------------------------------------------------

class Platform(EfinixPlatform):
    # NOTE(review): default_clk_name is the 25 MHz oscillator but the period
    # below (1e9/50e6 = 20 ns) corresponds to 50 MHz — confirm whether the
    # over-constraint is intentional or should be 1e9/25e6.
    default_clk_name = "clk25"
    default_clk_period = 1e9/50e6

    def __init__(self, toolchain="efinity"):
        EfinixPlatform.__init__(self, "Ti60F225C3", _io, _connectors, iobank_info=iobank_info, toolchain=toolchain)

    def create_programmer(self):
        return EfinixProgrammer()

    def do_finalize(self, fragment):
        EfinixPlatform.do_finalize(self, fragment)
        # NOTE(review): same 50 MHz period applied to clk25 — verify.
        self.add_period_constraint(self.lookup_request("clk25", loose=True), 1e9/50e6)
import time
from importlib import import_module

from django.conf import settings
from django.contrib.sessions.backends.base import UpdateError
from django.contrib.sessions.middleware import SessionMiddleware
from django.core.exceptions import SuspiciousOperation
from django.utils.cache import patch_vary_headers
from django.utils.http import http_date


class SamlSessionMiddleware(SessionMiddleware):
    """Keep the SAML session in its own cookie, independent of Django's
    regular session cookie.

    ``process_request`` always attaches the store as ``request.saml_session``;
    only the *cookie* name is configurable via ``SAML_SESSION_COOKIE_NAME``.
    """

    # Name of the browser cookie holding the SAML session key.
    cookie_name = getattr(settings, 'SAML_SESSION_COOKIE_NAME', 'saml_session')

    def process_request(self, request):
        """Attach a SessionStore for the SAML cookie as ``request.saml_session``."""
        session_key = request.COOKIES.get(self.cookie_name, None)
        request.saml_session = self.SessionStore(session_key)

    def process_response(self, request, response):
        """
        If request.saml_session was modified, or if the configuration is to save the
        session every time, save the changes and set a session cookie or delete
        the session cookie if the session has been emptied.
        """
        try:
            accessed = request.saml_session.accessed
            modified = request.saml_session.modified
            empty = request.saml_session.is_empty()
        except AttributeError:
            # process_request never ran (e.g. a prior middleware short-circuited).
            return response
        # First check if we need to delete this cookie.
        # The session should be deleted only if the session is entirely empty.
        if self.cookie_name in request.COOKIES and empty:
            response.delete_cookie(
                self.cookie_name,
                path=settings.SESSION_COOKIE_PATH,
                domain=settings.SESSION_COOKIE_DOMAIN,
                samesite=None,
            )
            patch_vary_headers(response, ('Cookie',))
        else:
            if accessed:
                patch_vary_headers(response, ('Cookie',))
            if (modified or settings.SESSION_SAVE_EVERY_REQUEST) and not empty:
                # Expiry must come from the SAML session itself, not the global
                # Django session (bug fix: was request.session.…).
                if request.saml_session.get_expire_at_browser_close():
                    max_age = None
                    expires = None
                else:
                    # Bug fix: was getattr(request, self.cookie_name), which
                    # raises AttributeError whenever SAML_SESSION_COOKIE_NAME
                    # differs from 'saml_session' — the attribute is always
                    # request.saml_session regardless of the cookie name.
                    max_age = request.saml_session.get_expiry_age()
                    expires_time = time.time() + max_age
                    expires = http_date(expires_time)
                # Save the session data and refresh the client cookie.
                # Skip session save for 500 responses, refs #3881.
                if response.status_code != 500:
                    try:
                        request.saml_session.save()
                    except UpdateError:
                        raise SuspiciousOperation(
                            "The request's session was deleted before the "
                            "request completed. The user may have logged "
                            "out in a concurrent request, for example."
                        )
                    response.set_cookie(
                        self.cookie_name,
                        request.saml_session.session_key,
                        max_age=max_age,
                        expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
                        path=settings.SESSION_COOKIE_PATH,
                        secure=settings.SESSION_COOKIE_SECURE or None,
                        httponly=settings.SESSION_COOKIE_HTTPONLY or None,
                        samesite=None
                    )
        return response
# String slicing demo
frase = 'Curso em video python'

# Ex01: indexing returns the single character at that position
print(frase[3])
# Ex02: a slice — the stop index is always excluded from the result
print(frase[3:15])
# Ex03: slice with a step, taking every 2nd character in the range
print(frase[3:21:2])
# Ex04: when the start is omitted it defaults to 0
print(frase[:4])
# Ex05: when the stop is omitted it defaults to the end of the string
print(frase[15:])
# Ex06: start and step given, stop omitted — every 3rd character to the end
print(frase[9::3])
import keras
from keras import models
from keras.models import load_model
import random
from preprocessing import get_kmer_from_realdata, DataGenerator_from_realdata
from voting import get_final_result
from generate_report import save_report

# path for testing file
filepath_train = "./data/real-world-data/sars-cov2-UW1-subtracted.fastq"
# path for trained model
filepath_model = "./data/pretrained_model.h5"
# path for saving final report
filepath_report = "./data/final_report_UW1"

# Nucleotide -> integer code mapping used by the preprocessing pipeline.
d_nucl = {"A": 0, "C": 1, "G": 2, "T": 3, "N": 4}

# Build k-mer features from the raw reads; f_index holds, per read, the
# (cumulative) end position of its k-mers in the flat feature matrix.
f_matrix, f_index = get_kmer_from_realdata(filepath_train)
testing_generator = DataGenerator_from_realdata(f_matrix, f_index)

model = load_model(filepath_model)
# hist[0]: per-k-mer class scores, hist[1]: per-k-mer location scores.
hist = model.predict_generator(testing_generator, verbose=1)

predicted_labels_list = [scores.argmax(axis=-1) for scores in hist[0]]
predicted_prob_list = [max(scores) for scores in hist[0]]
predicted_loc_list = [scores.argmax(axis=-1) for scores in hist[1]]
predicted_loc_prob_list = [max(scores) for scores in hist[1]]

# Aggregate the per-k-mer predictions into one (label, location) per read by
# voting over each read's slice [start:end) of the flat prediction lists.
final_label = []
final_loc = []
start = 0
for end in f_index:
    read_label, read_loc = get_final_result(
        predicted_labels_list[start:end],
        predicted_prob_list[start:end],
        predicted_loc_list[start:end],
        predicted_loc_prob_list[start:end],
    )
    final_label.append(read_label)
    final_loc.append(read_loc)
    start = end

save_report(filepath_report, final_label, final_loc)
"""Handle sound for a given maze"""NEWLINENEWLINEimport pygameNEWLINEimport pygame.mixerNEWLINENEWLINEfrom ..designpattern import event, observerNEWLINEfrom ..model import events, mazeNEWLINEfrom . import entity_soundNEWLINEfrom . import load_soundNEWLINENEWLINENEWLINE# TODO: Adjust volume of different sounds ?NEWLINEclass MazeSound(observer.Observer):NEWLINE """Handle all the sounds of the maze"""NEWLINENEWLINE solved = "MazeSolved.wav"NEWLINE failed = "MazeFailed.wav"NEWLINE extra_game = "ExtraGame.wav"NEWLINE hurry_up = "HurryUp.wav"NEWLINE extra_life = "ExtraLife.wav"NEWLINENEWLINE def __init__(self, maze_: maze.Maze) -> None:NEWLINE """ConstructorNEWLINENEWLINE Args:NEWLINE maze_ (maze.Maze): The maze to representNEWLINE """NEWLINE super().__init__()NEWLINENEWLINE self.maze = maze_NEWLINE self.maze.add_observer(self)NEWLINE self.running = FalseNEWLINE self.failed_sound = load_sound(self.failed)NEWLINE self.solved_sound = load_sound(self.solved)NEWLINE self.extra_game_sound = load_sound(self.extra_game)NEWLINE self.hurry_up_sound = load_sound(self.hurry_up)NEWLINE self.extra_life_sound = load_sound(self.extra_life)NEWLINENEWLINE # Set of all the views for each component of the mazeNEWLINE self.entity_sounds = {entity_sound.EntitySound.from_entity(entity_) for entity_ in self.maze.entities}NEWLINENEWLINE # Start the music if loadedNEWLINE try:NEWLINE pygame.mixer.music.play(-1)NEWLINE except pygame.error:NEWLINE pass # If not loadedNEWLINENEWLINE def notify(self, event_: event.Event) -> None:NEWLINE if isinstance(event_, events.NewEntityEvent):NEWLINE self.entity_sounds.add(entity_sound.EntitySound.from_entity(event_.entity))NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.RemovedEntityEvent):NEWLINE for sound in self.entity_sounds:NEWLINE if sound.entity == event_.entity:NEWLINE self.entity_sounds.remove(sound)NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.MazeFailedEvent):NEWLINE pygame.mixer.music.stop()NEWLINE 
self.failed_sound.play()NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.MazeSolvedEvent):NEWLINE pygame.mixer.music.stop()NEWLINE self.solved_sound.play()NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.ExtraGameEvent):NEWLINE self.extra_game_sound.play()NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.HurryUpEvent):NEWLINE self.hurry_up_sound.play()NEWLINE returnNEWLINENEWLINE if isinstance(event_, events.ExtraLifeEvent):NEWLINE self.extra_life_sound.play()NEWLINE returnNEWLINE
import os

from flask import Flask, render_template
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)

# SQLite database file sits next to this module.
db_path = os.path.join(os.path.dirname(__file__), 'database.db')
db_uri = f'sqlite:///{db_path}'
app.config.update(
    SQLALCHEMY_DATABASE_URI=db_uri,
    SECRET_KEY='thisisasecret',
)


@app.route('/')
def index():
    """Landing page."""
    return render_template('index.html')


@app.route('/about')
def about():
    """About page."""
    return render_template('about.html')


@app.route('/base')
def base():
    """Base layout template, exposed directly."""
    return render_template('base.html')


@app.route('/register')
def register():
    """Account registration page."""
    return render_template('register.html')


@app.route('/reservation')
def reservation():
    """Reservation page."""
    return render_template('reservation.html')


@app.route('/sign_in')
def sign_in():
    """Sign-in page."""
    return render_template('sign_in.html')


if __name__ == '__main__':
    app.run(debug=True)